8000 Add Saiga chat format. (#1050) · sjanaX01/llama-cpp-python@907b9e9 · GitHub
[go: up one dir, main page]

Skip to content

Commit 907b9e9

Browse files
authored
Add Saiga chat format. (abetlen#1050)
1 parent f766b70 commit 907b9e9

File tree

1 file changed

+22
-0
lines changed

1 file changed

+22
-0
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -734,6 +734,28 @@ def format_openchat(
734734
return ChatFormatterResponse(prompt=_prompt, stop=_sep)
735735

736736

737+
# Chat format for Saiga models, see more details and available models:
# https://huggingface.co/collections/IlyaGusev/saiga2-saigamistral-6505d4ccc3d1e53166b636cd
@register_chat_format("saiga")
def format_saiga(
    messages: list[llama_types.ChatCompletionRequestMessage],
    **kwargs,
) -> ChatFormatterResponse:
    """Render chat *messages* into the Saiga prompt format.

    Every message with content becomes a closed ``<s>{role}\\n{content}</s>``
    turn; a role whose content is empty opens an unterminated ``<s>{role}\\n``
    turn instead. The prompt always ends with ``<s>bot`` so the model
    continues by generating the assistant's reply.
    """
    role_map = dict(user="user", bot="bot", system="system")
    turn_template = "<s>{role}\n{content}</s>"

    pieces = []
    for role, content in _map_roles(messages, role_map):
        if content:
            pieces.append(turn_template.format(role=role, content=content))
        else:
            # Open a turn for this role with no content yet.
            pieces.append(f"<s>{role}\n")
    # Response template: cue the model to answer as "bot".
    pieces.append("<s>bot")
    return ChatFormatterResponse(prompt="".join(pieces).strip())
757+
758+
737759
@register_chat_completion_handler("functionary")
738760
def functionary_chat_handler(
739761
llama: llama.Llama,

0 commit comments

Comments
 (0)
0