Add mistral instruct chat format as "mistral-instruct" (#799) · devilcoder01/llama-cpp-python@ce38dbd · GitHub
[go: up one dir, main page]

Skip to content

Commit ce38dbd

Browse files
Add mistral instruct chat format as "mistral-instruct" (abetlen#799)
* Added mistral instruct chat format as "mistral" * Fix stop sequence (merge issue) * Update chat format name to `mistral-instruct` --------- Co-authored-by: Andrei <abetlen@gmail.com>
1 parent 52c4a84 commit ce38dbd

File tree

1 file changed

+16
-0
lines changed

1 file changed

+16
-0
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -877,6 +877,22 @@ def format_chatml(
877877
return ChatFormatterResponse(prompt=_prompt, stop=_sep)
878878

879879

880+
@register_chat_format("mistral-instruct")
def format_mistral(
    messages: List[llama_types.ChatCompletionRequestMessage],
    **kwargs: Any,
) -> ChatFormatterResponse:
    """Render *messages* as a Mistral-Instruct prompt.

    The system message (empty string when absent) is wrapped as
    ``<s>{system_message}``; user turns are prefixed with ``[INST] `` and
    assistant turns with ``[/INST]``, joined by a single space. An open
    assistant turn is appended so generation continues after ``[/INST]``.
    """
    role_map = dict(user="[INST] ", assistant="[/INST]")
    separator = " "
    # Prepend the BOS-style wrapper around whatever system message was given.
    rendered_system = """<s>{system_message}""".format(
        system_message=_get_system_message(messages)
    )
    turns = _map_roles(messages, role_map)
    # Leave the assistant slot open (None content) so the model writes the reply.
    turns.append((role_map["assistant"], None))
    prompt = _format_no_colon_single(rendered_system, turns, separator)
    return ChatFormatterResponse(prompt=prompt)
894+
895+
880896
@register_chat_format("chatglm3")
881897
def format_chatglm3(
882898
messages: List[llama_types.ChatCompletionRequestMessage],

0 commit comments

Comments
 (0)
0