mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-03-14 15:57:27 +00:00
Model: Remove generate_window
Not required, since we raise an error when exceeding max_seq_len. Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
@@ -1242,9 +1242,6 @@ class ExllamaV2Container:
|
||||
grammar_handler = ExLlamaV2Grammar()
|
||||
banned_strings = []
|
||||
|
||||
# TODO: Not used for some reason?
|
||||
generate_window = max(params.generate_window, self.config.max_seq_len // 8)
|
||||
|
||||
self.assign_gen_params(
|
||||
params,
|
||||
gen_settings,
|
||||
|
||||
@@ -41,12 +41,6 @@ class BaseSamplerRequest(BaseModel):
|
||||
ge=0,
|
||||
)
|
||||
|
||||
generate_window: Optional[int] = Field(
|
||||
default_factory=lambda: get_default_sampler_value("generate_window", 512),
|
||||
examples=[512],
|
||||
ge=0,
|
||||
)
|
||||
|
||||
stop: Optional[Union[str, List[Union[str, int]]]] = Field(
|
||||
default_factory=lambda: get_default_sampler_value("stop", []),
|
||||
validation_alias=AliasChoices("stop", "stop_sequence"),
|
||||
|
||||
@@ -14,9 +14,6 @@ max_tokens:
|
||||
min_tokens:
|
||||
override: 0
|
||||
force: false
|
||||
generate_window:
|
||||
override: 512
|
||||
force: false
|
||||
stop:
|
||||
override: []
|
||||
force: false
|
||||
|
||||
Reference in New Issue
Block a user