1 parent cfb7da9 commit 9c36688
llama_cpp/server/settings.py
@@ -60,7 +60,7 @@ class ModelSettings(BaseSettings):
     seed: int = Field(
         default=llama_cpp.LLAMA_DEFAULT_SEED, description="Random seed. -1 for random."
     )
-    n_ctx: int = Field(default=2048, ge=1, description="The context size.")
+    n_ctx: int = Field(default=2048, ge=0, description="The context size.")
     n_batch: int = Field(
         default=512, ge=1, description="The batch size to use per eval."
     )
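
The change relaxes the lower bound on n_ctx from ge=1 to ge=0, so a value of 0 now passes the server's settings validation; in llama.cpp a context size of 0 is interpreted as "use the context length the model was trained with", which appears to be the intent here. Below is a minimal sketch of the resulting validation behaviour. The model path is hypothetical, and constructing the settings object only runs pydantic validation without loading the model.

from llama_cpp.server.settings import ModelSettings

# With the constraint relaxed to ge=0, n_ctx=0 is accepted; the backend can
# then resolve it to the model's trained context length at load time.
settings = ModelSettings(model="models/example.gguf", n_ctx=0)
print(settings.n_ctx)  # 0

# Negative values are still rejected by the ge=0 constraint, e.g. n_ctx=-1
# raises a pydantic ValidationError.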