debug · themrzmaster/llama-cpp-python@ec4435f · GitHub
[go: up one dir, main page]

Skip to content

Commit ec4435f

Browse files
committed
debug
1 parent c4b2f87 commit ec4435f

File tree

1 file changed

+2
-3
lines changed

1 file changed

+2
-3
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2459,7 +2459,7 @@ def base_function_calling(
24592459
)
24602460

24612461
follow_up_gbnf_tool_grammar = (
2462-
f"""root ::= functions | "</done>"\n"""
2462+
f"""root ::= functions | "{end_token}"\n"""
24632463
f"""functions ::= {function_names}\n"""
24642464
)
24652465

@@ -2600,7 +2600,7 @@ def base_function_calling(
26002600

26012601
response = cast(llama_types.CreateCompletionResponse, response)
26022602
print(response["choices"][0])
2603-
if response["choices"][0]["text"] == "</done>":
2603+
if end_token in response["choices"][0]["text"]:
26042604
break
26052605
tool_name = response["choices"][0]["text"][len("functions.") :].replace(":", "")
26062606
tool = next(
@@ -2815,7 +2815,6 @@ def vicuna_function_calling(
28152815
'\n{ "arg1": "value1", "arg2": "value2" };'
28162816
"\nfunctions.<another_function_name>:"
28172817
'\n{ "arg1": "value3", "arg2": "value4" }'
2818-
"\n\nWhen you are done with the function calls, end the message with </done>."
28192818
"\n\nTo respond with a message begin the message with 'message:', use the following format:"
28202819
"\n\nmessage:"
28212820
"\n<message> </s>"

0 commit comments

Comments (0)
0