8000
We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7dea619 commit c4b2f87 — Copy full SHA for c4b2f87
llama_cpp/llama_chat_format.py
@@ -2571,6 +2571,7 @@ def base_function_calling(
2571
completions.append(completion_or_chunks)
2572
completions_tool_name.append(tool_name)
2573
prompt += completion_or_chunks["choices"][0]["text"]
2574
+ print(prompt)
2575
prompt += "\n"
2576
response = llama.create_completion(
2577
prompt=prompt,
@@ -2598,7 +2599,7 @@ def base_function_calling(
2598
2599
)
2600
2601
response = cast(llama_types.CreateCompletionResponse, response)
-
2602
+ print(response["choices"][0])
2603
if response["choices"][0]["text"] == "</done>":
2604
break
2605
tool_name = response["choices"][0]["text"][len("functions.") :].replace(":", "")
0 commit comments