OAI: support stream_options argument

This commit is contained in:
Volodymyr Kuznetsov
2024-07-08 13:42:54 -07:00
parent 073e9fa6f0
commit b149d3398d
3 changed files with 24 additions and 1 deletion

View File

@@ -64,3 +64,4 @@ class ChatCompletionStreamChunk(BaseModel):
created: int = Field(default_factory=lambda: int(time()))
model: str
object: str = "chat.completion.chunk"
usage: Optional[UsageStats] = None

View File

@@ -18,6 +18,10 @@ class CompletionResponseFormat(BaseModel):
type: str = "text"
class ChatCompletionStreamOptions(BaseModel):
    """Options controlling streaming responses, mirroring OpenAI's
    `stream_options` request parameter for chat completions.

    NOTE(review): based on this commit's other hunks, `include_usage`
    presumably causes a `usage: Optional[UsageStats]` payload to be
    attached to the final `ChatCompletionStreamChunk` — confirm against
    the streaming handler that consumes this flag.
    """

    # Defaults to False (usage stats omitted from the stream) when the
    # client does not send the field; accepts explicit null per Optional.
    include_usage: Optional[bool] = False
class CommonCompletionRequest(BaseSamplerRequest):
"""Represents a common completion request."""
@@ -27,6 +31,7 @@ class CommonCompletionRequest(BaseSamplerRequest):
# Generation info (remainder is in BaseSamplerRequest superclass)
stream: Optional[bool] = False
stream_options: Optional[ChatCompletionStreamOptions] = None
logprobs: Optional[int] = Field(
default_factory=lambda: get_default_sampler_value("logprobs", 0)
)