prompt · themrzmaster/llama-cpp-python@a9e221e · GitHub

Commit a9e221e

prompt
committed
1 parent 7bb01e5 commit a9e221e

File tree

1 file changed: +5 -9 lines changed

llama_cpp/llama_chat_format.py

Lines changed: 5 additions & 9 deletions
@@ -2462,11 +2462,7 @@ def base_function_calling(
         f"""root ::= functions | "</done>"\n"""
         f"""functions ::= {function_names}\n"""
     )
-    follow_up_msg_gbnf_tool_grammar = (
-        f"""root ::= functions | "</s>"\n"""
-        f"""functions ::= {function_names}\n"""
-    )
-
+
 
     prompt = template_renderer.render(
         messages=messages,
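
For context on what these grammar strings look like once {function_names} is filled in, here is a hedged sketch in Python: it builds the same kind of GBNF text from a made-up tools list. The tool names and surrounding variable names are illustrative, not this repo's exact code.

# Hedged sketch of how a GBNF tool grammar like the one in this hunk is assembled.
# The tools list and names below are made up for illustration.
tools = [
    {"function": {"name": "get_weather"}},
    {"function": {"name": "search_web"}},
]

# An alternation of quoted call prefixes, one per tool.
function_names = " | ".join(
    f'"functions.{tool["function"]["name"]}:"' for tool in tools
)

follow_up_gbnf_tool_grammar = (
    f"""root ::= functions | "</done>"\n"""
    f"""functions ::= {function_names}\n"""
)

print(follow_up_gbnf_tool_grammar)
# root ::= functions | "</done>"
# functions ::= "functions.get_weather:" | "functions.search_web:"
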
@@ -2523,9 +2519,9 @@ def base_function_calling(
                 mirostat_eta=mirostat_eta,
                 model=model,
                 logits_processor=logits_processor,
-                grammar=llama_grammar.LlamaGrammar.from_string(
-                    follow_up_msg_gbnf_tool_grammar, verbose=llama.verbose
-                ),
+                # grammar=llama_grammar.LlamaGrammar.from_string(
+                #     follow_up_gbnf_tool_grammar, verbose=llama.verbose
+                # ),
             ),
             stream=stream,
         )
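
The kwarg commented out in this hunk is the standard way llama-cpp-python applies such a grammar to a completion call. Below is a minimal sketch, assuming a local GGUF model at a placeholder path and reusing a tiny grammar string from the previous sketch; it mirrors the wiring of the now-commented-out lines rather than this repo's exact call site.

# Hedged sketch: compiling a GBNF string and passing it via the grammar= kwarg.
# model_path and the prompt are placeholders.
import llama_cpp.llama_grammar as llama_grammar
from llama_cpp import Llama

llama = Llama(model_path="./model.gguf", verbose=False)

follow_up_gbnf_tool_grammar = (
    """root ::= functions | "</done>"\n"""
    """functions ::= "functions.get_weather:"\n"""
)

completion = llama.create_completion(
    prompt="...",  # the rendered chat prompt would go here
    max_tokens=32,
    grammar=llama_grammar.LlamaGrammar.from_string(
        follow_up_gbnf_tool_grammar, verbose=llama.verbose
    ),
)
print(completion["choices"][0]["text"])
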
@@ -2813,7 +2809,7 @@ def vicuna_function_calling(
         "\nfunctions.{{ tool.function.name }}:\n"
         "{{ tool.function.parameters | tojson }}"
         "\n{% endfor %}"
-        "\n\nYou can respond to users messages with either a single message or multiple function calls."
+        "\n\nYou can respond to users messages with either a single message or multiple function calls, never both. Prioritize function calls over messages, when applicable."
         "\n\nTo respond with a message begin the message with 'message:', use the following format:"
         "\n\nmessage:"
        "\n<message>"
