Merge branch 'main' of github.com:themrzmaster/llama-cpp-python · themrzmaster/llama-cpp-python@8d29275 · GitHub

Commit 8d29275

Merge branch 'main' of github.com:themrzmaster/llama-cpp-python
2 parents eec49bb + 0dfe52e commit 8d29275

File tree

1 file changed: +5 -3 lines changed

llama_cpp/llama_chat_format.py

Lines changed: 5 additions & 3 deletions
@@ -2470,6 +2470,7 @@ def base_function_calling(
         tool_calls=True,
         add_generation_prompt=True,
     )
+    print(prompt)
     completion_or_chunks = llama.create_completion(
         prompt=prompt,
         temperature=temperature,
@@ -2495,6 +2496,7 @@ def base_function_calling(
     )
     completion: llama_types.CreateCompletionResponse = completion_or_chunks  # type: ignore
     text = completion["choices"][0]["text"]
+    print(text)
     if "message" in text:
         return _convert_completion_to_chat(
             llama.create_completion(
@@ -3118,9 +3120,9 @@ def mixtral_function_calling(
         mirostat_eta=mirostat_eta,
         model=model,
         logits_processor=logits_processor,
-        # grammar=llama_grammar.LlamaGrammar.from_string(
-        #     initial_gbnf_tool_grammar, verbose=llama.verbose
-        # ),
+        grammar=llama_grammar.LlamaGrammar.from_string(
+            initial_gbnf_tool_grammar, verbose=llama.verbose
+        ),
     )
     completion: llama_types.CreateCompletionResponse = completion_or_chunks  # type: ignore
     text = completion["choices"][0]["text"]

0 commit comments
