mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-03-14 15:57:27 +00:00
OAI: Implement completion API endpoint
Add support for /v1/completions with the option to use streaming if needed. Also rewrite API endpoints to use async when possible since that improves request performance. Model container parameter names also needed rewrites, and fallback cases were set to their disabled values. Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
19
OAI/utils.py
Normal file
19
OAI/utils.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from OAI.models.completions import CompletionResponse, CompletionRespChoice
|
||||
from OAI.models.common import UsageStats
|
||||
from typing import Optional
|
||||
|
||||
def create_completion_response(text: str, index: int, model_name: Optional[str]) -> "CompletionResponse":
    """Wrap generated text in an OAI-style completion response.

    Args:
        text: The generated completion text.
        index: Position of this choice within the response's choice list.
        model_name: Name of the model that produced the text. When None,
            an empty string is reported instead.

    Returns:
        A CompletionResponse containing a single CompletionRespChoice.
    """
    # TODO: Add method to get token amounts in model for UsageStats

    choice = CompletionRespChoice(
        finish_reason="Generated",
        index=index,
        text=text,
    )

    # Fall back to "" so the response model always receives a string.
    return CompletionResponse(
        choices=[choice],
        model=model_name or "",
    )
|
||||
Reference in New Issue
Block a user