8000 add user-assistant chat format · DrewWalkup/llama-cpp-python@0407527 · GitHub
Commit 0407527

add user-assistant chat format
1 parent 6eb2523 commit 0407527

File tree: llama_cpp/llama_chat_format.py (1 file changed, +17 −0)

llama_cpp/llama_chat_format.py

Lines changed: 17 additions & 0 deletions
@@ -1037,6 +1037,23 @@ def format_gemma(
     return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
 
+# Chat format for Nous-Capybara models, see more details:
+# https://huggingface.co/NousResearch/Nous-Capybara-34B
+@register_chat_format("user-assistant")
+def format_user_assistant(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    stop_str = "</s>"
+    system_message = _get_system_message(messages)
+    _roles = dict(user="USER:", assistant="ASSISTANT:")
+    _sep = "\n"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=stop_str)
+
+
 # Tricky chat formats that require custom chat handlers
 
 
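Usage note (not part of the commit): a minimal sketch of how the newly registered "user-assistant" chat format might be selected when constructing a Llama instance in llama-cpp-python. The model path, context size, and prompts below are hypothetical placeholders, not values from this commit.

from llama_cpp import Llama

# Hypothetical GGUF path; any Nous-Capybara conversion would work here.
llm = Llama(
    model_path="./models/nous-capybara-34b.Q4_K_M.gguf",
    chat_format="user-assistant",  # format registered by this commit
    n_ctx=4096,
)

# Messages are mapped to "USER:" / "ASSISTANT:" turns by format_user_assistant,
# and "</s>" is returned as the stop string for generation.
response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Give a one-line summary of what a chat format does."},
    ],
)
print(response["choices"][0]["message"]["content"])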