mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-04-25 16:59:09 +00:00
OAI: support stream_options argument
This commit is contained in:
@@ -64,3 +64,4 @@ class ChatCompletionStreamChunk(BaseModel):
|
||||
created: int = Field(default_factory=lambda: int(time()))
|
||||
model: str
|
||||
object: str = "chat.completion.chunk"
|
||||
usage: Optional[UsageStats] = None
|
||||
|
||||
@@ -18,6 +18,10 @@ class CompletionResponseFormat(BaseModel):
|
||||
type: str = "text"
|
||||
|
||||
|
||||
class ChatCompletionStreamOptions(BaseModel):
    """Options controlling streaming chat-completion responses.

    NOTE(review): presumably mirrors OpenAI's `stream_options` request
    field — confirm against the OpenAI API reference.
    """

    # When True, the stream is expected to include usage statistics
    # (see the Optional[UsageStats] `usage` field on the stream chunk
    # model elsewhere in this file). Defaults to False.
    include_usage: Optional[bool] = False
|
||||
|
||||
|
||||
class CommonCompletionRequest(BaseSamplerRequest):
|
||||
"""Represents a common completion request."""
|
||||
|
||||
@@ -27,6 +31,7 @@ class CommonCompletionRequest(BaseSamplerRequest):
|
||||
|
||||
# Generation info (remainder is in BaseSamplerRequest superclass)
|
||||
stream: Optional[bool] = False
|
||||
stream_options: Optional[ChatCompletionStreamOptions] = None
|
||||
logprobs: Optional[int] = Field(
|
||||
default_factory=lambda: get_default_sampler_value("logprobs", 0)
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user