Remove workaround · coderonion/llama-cpp-python@5c50af7 · GitHub

Commit 5c50af7

Remove workaround

1 parent c3972b6 · commit 5c50af7

File tree

1 file changed: +0 −5 lines changed

llama_cpp/llama.py

Lines changed: 0 additions & 5 deletions
@@ -176,11 +176,6 @@ def sample(
             The sampled token.
         """
         assert self.ctx is not None
-        # Temporary workaround for https://github.com/ggerganov/llama.cpp/issues/684
-        if temp == 0.0:
-            temp = 1.0
-            top_p = 0.0
-            top_k = 1
         return llama_cpp.llama_sample_top_p_top_k(
             ctx=self.ctx,
             last_n_tokens_data=(llama_cpp.llama_token * self.last_n_tokens_size)(
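
For context, the deleted block was a stopgap for https://github.com/ggerganov/llama.cpp/issues/684, under which passing temp == 0.0 straight through to the native sampler misbehaved; the binding instead emulated greedy decoding by forcing top_k = 1 and neutralizing temp and top_p. Below is a minimal plain-Python sketch of why that parameter substitution amounts to greedy (argmax) sampling. It is illustrative only: the function name sample_top_p_top_k, the list-based logits, and the toy sampling logic are assumptions for this example, not the library's actual implementation.

import math
import random

def sample_top_p_top_k(logits, top_k, top_p, temp):
    # Illustrative toy sampler (assumed helper, not the llama.cpp API).
    # Top-k filter: keep the k highest logits; top_k == 1 leaves only the argmax.
    candidates = sorted(enumerate(logits), key=lambda kv: kv[1], reverse=True)[:top_k]
    # Temperature-scaled softmax over the survivors (temp == 1.0 leaves logits unscaled).
    weights = [(tok, math.exp(logit / temp)) for tok, logit in candidates]
    total = sum(w for _, w in weights)
    probs = [(tok, w / total) for tok, w in weights]
    # Top-p (nucleus) filter: keep the smallest prefix whose mass reaches top_p;
    # with top_p == 0.0 that is just the single most probable token.
    kept, mass = [], 0.0
    for tok, p in probs:
        kept.append((tok, p))
        mass += p
        if mass >= top_p:
            break
    # Renormalize what survived and draw one token.
    total = sum(p for _, p in kept)
    r = random.random() * total
    for tok, p in kept:
        r -= p
        if r <= 0.0:
            return tok
    return kept[-1][0]

# With the workaround's substituted parameters the draw is deterministic:
# only the argmax token survives filtering, so the result here is always index 1.
assert sample_top_p_top_k([0.1, 2.5, 0.3], top_k=1, top_p=0.0, temp=1.0) == 1

Removing the shim means temp == 0.0 is now passed through unchanged, presumably because the upstream issue was resolved in llama.cpp itself.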
