8000
We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 02f9fb8 commit 6208751 — Copy full SHA for 6208751
llama_cpp/llama.py
@@ -696,10 +696,12 @@ def create_chat_completion(
696
Generated chat completion or a stream of chat completion chunks.
697
"""
698
stop = stop if stop is not None else []
699
+ chat_history = "".join(
700
+ f'### {"Human" if message["role"] == "user" else "Assistant"}:{message["content"]}'
701
for message in messages
702
)
- PROMPT = f" \n\n### Instructions:{instructions}\n\n### Inputs:{chat_history}\n\n### Response:\nassistant: "
- PROMPT_STOP = ["###", "\nuser: ", "\nassistant: ", "\nsystem: "]
703
+ PROMPT = chat_history + "### Assistant:"
704
+ PROMPT_STOP = ["### Assistant:", "### Human:", "\n"]
705
completion_or_chunks = self(
706
prompt=PROMPT,
707
stop=PROMPT_STOP + stop,
0 commit comments