Add Pygmalion chat format (#986) · shawnx11/llama-cpp-python@b938ccc · GitHub

Commit b938ccc

Add Pygmalion chat format (abetlen#986)
1 parent 6bbeea0 commit b938ccc

File tree

1 file changed: +17 -0 lines changed


llama_cpp/llama_chat_format.py

Lines changed: 17 additions & 0 deletions
@@ -637,6 +637,23 @@ def format_zephyr(
     _prompt = _format_chatml(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
+
+@register_chat_format("pygmalion")
+def format_pygmalion(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    system_template = """<|system|>{system_message}"""
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _roles = dict(user="<|user|>", assistant="<|model|>")
+    _sep = "\n"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=_sep)
+
+
 @register_chat_format("chatml")
 def format_chatml(
     messages: List[llama_types.ChatCompletionRequestMessage],
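
The new format maps user turns to <|user|> and assistant turns to <|model|>, prefixes the conversation with a <|system|>-tagged system message, and uses a newline as both the turn separator and the stop sequence. Once registered, the format can be selected by name through the high-level API. The sketch below is illustrative and not part of this commit; the model path is a placeholder for any Pygmalion-style GGUF file.

    from llama_cpp import Llama

    # Placeholder path: substitute a local Pygmalion-style GGUF model.
    llm = Llama(
        model_path="./pygmalion-2-7b.Q4_K_M.gguf",
        chat_format="pygmalion",  # selects format_pygmalion registered above
    )

    response = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": "Enter roleplay mode. You are a friendly tavern keeper."},
            {"role": "user", "content": "Hello! Who are you?"},
        ],
        max_tokens=64,
    )
    print(response["choices"][0]["message"]["content"])

Note that because the formatter returns stop=_sep (a newline), generation under this chat format stops at the first newline in the model's reply.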

0 commit comments
