8000 Update chat parameters · coderonion/llama-cpp-python@83b2be6 · GitHub
[go: up one dir, main page]

Skip to content

Commit 83b2be6

Browse files
committed
Update chat parameters
1 parent 6208751 commit 83b2be6

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

llama_cpp/llama.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -672,12 +672,12 @@ def _convert_text_completion_chunks_to_chat(
672672
def create_chat_completion(
673673
self,
674674
messages: List[ChatCompletionMessage],
675-
temperature: float = 0.8,
675+
temperature: float = 0.2,
676676
top_p: float = 0.95,
677677
top_k: int = 40,
678678
stream: bool = False,
679679
stop: Optional[List[str]] = [],
680-
max_tokens: int = 128,
680+
max_tokens: int = 256,
681681
repeat_penalty: float = 1.1,
682682
) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
683683
"""Generate a chat completion from a list of messages.

0 commit comments

Comments (0)
0