mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-04-20 14:28:54 +00:00
Model: Auto-scale max_tokens by default
If max_tokens is None, it automatically scales to fill up the remaining context. This does not mean the generation will actually fill that context, since EOS stop conditions still apply. Originally suggested in #86.

Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
@@ -14,7 +14,7 @@ class BaseSamplerRequest(BaseModel):
     """Common class for sampler params that are used in APIs"""

     max_tokens: Optional[int] = Field(
-        default_factory=lambda: get_default_sampler_value("max_tokens", 150),
+        default_factory=lambda: get_default_sampler_value("max_tokens"),
         examples=[150],
     )
Reference in New Issue
Block a user