forward reset argument to allow user control when to reset model state · abetlen/llama-cpp-python@bfd9058 · GitHub
Commit bfd9058

Neroro64 authored and NuoChenBCT committed
forward reset argument to allow user control when to reset model state
1 parent 4244151 commit bfd9058

File tree

1 file changed: +6 −0 lines changed


llama_cpp/llama.py

Lines changed: 6 additions & 0 deletions
@@ -1034,6 +1034,7 @@ def _create_completion(
         logits_processor: Optional[LogitsProcessorList] = None,
         grammar: Optional[LlamaGrammar] = None,
         logit_bias: Optional[Dict[str, float]] = None,
+        reset: bool = True,
     ) -> Union[
         Iterator[CreateCompletionResponse], Iterator[CreateCompletionStreamResponse]
     ]:
@@ -1222,6 +1223,7 @@ def logit_bias_processor(
             stopping_criteria=stopping_criteria,
             logits_processor=logits_processor,
             grammar=grammar,
+            reset=reset,
         ):
             assert self._model.model is not None
             if llama_cpp.llama_token_is_eog(self._model.model, token):
@@ -1653,6 +1655,7 @@ def create_completion(
         logits_processor: Optional[LogitsProcessorList] = None,
         grammar: Optional[LlamaGrammar] = None,
         logit_bias: Optional[Dict[str, float]] = None,
+        reset: bool = True,
     ) -> Union[CreateCompletionResponse, Iterator[CreateCompletionStreamResponse]]:
         """Generate text from a prompt.

@@ -1716,6 +1719,7 @@ def create_completion(
             logits_processor=logits_processor,
             grammar=grammar,
             logit_bias=logit_bias,
+            reset=reset,
         )
         if stream:
             chunks: Iterator[CreateCompletionStreamResponse] = completion_or_chunks
@@ -1746,6 +1750,7 @@ def __call__(
         mirostat_tau: float = 5.0,
         mirostat_eta: float = 0.1,
         model: Optional[str] = None,
+        reset: bool = True,
         stopping_criteria: Optional[StoppingCriteriaList] = None,
         logits_processor: Optional[LogitsProcessorList] = None,
         grammar: Optional[LlamaGrammar] = None,
@@ -1813,6 +1818,7 @@ def __call__(
             logits_processor=logits_processor,
             grammar=grammar,
             logit_bias=logit_bias,
+            reset=reset,
         )

     def create_chat_completion(
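
The change threads a reset flag from the public completion entry points (__call__ and create_completion) through _create_completion and into the underlying self.generate(...) call, so callers can decide whether the model state is cleared before generating. Below is a minimal usage sketch under the assumption of a local GGUF model; the model path and prompts are placeholders, the default reset=True keeps the previous behaviour, and exactly how much of the old context is reused is determined by Llama.generate() — this sketch only shows how the flag is passed.

from llama_cpp import Llama

# Placeholder path; any local GGUF model file works here.
llm = Llama(model_path="./models/model.gguf")

# Default behaviour (reset=True): the model state is reset before generation,
# exactly as before this commit.
first = llm("Q: Name three moons of Jupiter. A:", max_tokens=48)

# reset=False is forwarded down to Llama.generate(), so the previously
# evaluated context is kept rather than cleared before the new prompt is
# processed.
follow_up = llm(" Name one more. A:", max_tokens=48, reset=False)

print(first["choices"][0]["text"])
print(follow_up["choices"][0]["text"])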
