mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-04-27 01:38:56 +00:00
[WIP] OpenAI Tools Support/Function calling (#154)
* returning stop str if exists from gen * added chat template for firefunctionv2 * pulling tool vars from template * adding parsing for tool inputs/outputs * passing tool data from endpoint to chat template, adding tool_start to the stop list * loosened typing on the response tool call, leaning more on the user supplying a quality schema if they want a particular format * non streaming generation prototype * cleaning template * Continued work with type, ingestion into template, and chat template for fire func * Correction - streaming toolcall comes back as delta obj not inside chatcomprespchoice per chat_completion_chunk.py inside OAI lib. * Ruff Formatting * Moved stop string and tool updates out of prompt creation func Updated tool pydantic to match OAI Support for streaming Updated generate tool calls to use flag within chat_template and insert tool reminder * Llama 3.1 chat templates Updated fire func template * renamed llama3.1 to chatml_with_headers.. * update name of template * Support for calling a tool start token rather than the string. Simplified tool_params Warning when gen_settings are being overridden because user set temp to 0 Corrected schema and tools to correct types for function args. Str for some reason * draft groq tool use model template * changed headers to vars for readability (but mostly because some models are weird about newlines after headers, so this is an easier way to change globally) * Clean up comments and code in chat comp * Post processed tool call to meet OAI spec rather than forcing model to write json in a string in the middle of the call. 
* changes example back to args as json rather than string of json * Standardize chat templates to each other * cleaning/rewording * stop elements can also be ints (tokens) * Cleaning/formatting * added special tokens for tools and tool_response as specified in description * Cleaning * removing aux templates - going to live in llm-promp-templates repo instead * Tree: Format Signed-off-by: kingbri <bdashore3@proton.me> * Chat Completions: Don't include internal tool variables in OpenAPI Use SkipJsonSchema to suppress inclusion with the OpenAPI JSON. The location of these variables may need to be changed in the future. Signed-off-by: kingbri <bdashore3@proton.me> * Templates: Deserialize metadata on template load Since we're only looking for specific template variables that are static in the template, it makes more sense to render when the template is initialized. Signed-off-by: kingbri <bdashore3@proton.me> * Tools: Fix comments Adhere to the format style of comments in the rest of the project. Signed-off-by: kingbri <bdashore3@proton.me> --------- Co-authored-by: Ben Gitter <gitterbd@gmail.com> Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
@@ -1,9 +1,11 @@
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic.json_schema import SkipJsonSchema
|
||||
from time import time
|
||||
from typing import Union, List, Optional, Dict
|
||||
from uuid import uuid4
|
||||
|
||||
from endpoints.OAI.types.common import UsageStats, CommonCompletionRequest
|
||||
from endpoints.OAI.types.tools import ToolSpec, ToolCall, tool_call_schema
|
||||
|
||||
|
||||
class ChatCompletionLogprob(BaseModel):
|
||||
@@ -19,12 +21,16 @@ class ChatCompletionLogprobs(BaseModel):
|
||||
class ChatCompletionMessage(BaseModel):
    """A single chat message; every field is optional so the same model
    can represent streaming deltas and partial assistant turns."""

    role: Optional[str] = None
    content: Optional[str] = None
    # Populated when the assistant requests one or more tool invocations
    tool_calls: Optional[List[ToolCall]] = None
|
||||
|
||||
|
||||
class ChatCompletionRespChoice(BaseModel):
    """One completion choice in a non-streaming chat response."""

    # Always 0 — multiple choices are not generated
    index: int = 0
    finish_reason: Optional[str] = None

    # The stop string that ended generation, if any; lets us understand
    # why generation stopped and whether a tool_call should follow
    stop_str: Optional[str] = None
    message: ChatCompletionMessage
    logprobs: Optional[ChatCompletionLogprobs] = None
|
||||
|
||||
@@ -42,12 +48,28 @@ class ChatCompletionRequest(CommonCompletionRequest):
|
||||
# Messages
|
||||
# Take in a string as well even though it's not part of the OAI spec
|
||||
# support messages.content as a list of dict
|
||||
messages: Union[str, List[Dict[str, Union[str, List[Dict[str, str]]]]]]
|
||||
|
||||
# WIP this can probably be tightened, or maybe match the OAI lib type
|
||||
# in openai\types\chat\chat_completion_message_param.py
|
||||
messages: Union[str, List[Dict]]
|
||||
prompt_template: Optional[str] = None
|
||||
add_generation_prompt: Optional[bool] = True
|
||||
template_vars: Optional[dict] = {}
|
||||
response_prefix: Optional[str] = None
|
||||
|
||||
# tools is follows the format OAI schema, functions is more flexible
|
||||
# both are available in the chat template.
|
||||
|
||||
tools: Optional[List[ToolSpec]] = None
|
||||
functions: Optional[List[Dict]] = None
|
||||
|
||||
# Typically collected from Chat Template.
|
||||
# Don't include this in the OpenAPI docs
|
||||
# TODO: Use these custom parameters
|
||||
tool_call_start: SkipJsonSchema[Optional[List[Union[str, int]]]] = None
|
||||
tool_call_end: SkipJsonSchema[Optional[str]] = None
|
||||
tool_call_schema: SkipJsonSchema[Optional[dict]] = tool_call_schema
|
||||
|
||||
|
||||
class ChatCompletionResponse(BaseModel):
|
||||
id: str = Field(default_factory=lambda: f"chatcmpl-{uuid4().hex}")
|
||||
|
||||
58
endpoints/OAI/types/tools.py
Normal file
58
endpoints/OAI/types/tools.py
Normal file
@@ -0,0 +1,58 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Dict, Literal
|
||||
|
||||
# JSON schema (draft-07) describing the list of tool calls the model
# may emit. Built from named sub-schemas for readability.

# Inner function payload: the callable's name plus its arguments.
# "arguments" stays an object here; it is converted to OAI's
# JSON-encoded string during post processing.
_function_call_schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "arguments": {"type": "object"},
    },
    "required": ["name", "arguments"],
}

# A single tool-call entry per the OAI spec.
_tool_call_entry_schema = {
    "type": "object",
    "properties": {
        "id": {"type": "string"},
        "function": _function_call_schema,
        "type": {"type": "string", "enum": ["function"]},
    },
    "required": ["id", "function", "type"],
}

tool_call_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "array",
    "items": _tool_call_entry_schema,
}
|
||||
|
||||
|
||||
class Function(BaseModel):
    """Represents a description of a tool function."""

    # Name the model uses to reference this function
    name: str
    # Human-readable explanation of what the function does
    description: str
    # JSON-schema style mapping describing the function's parameters
    parameters: Dict[str, object]
|
||||
|
||||
|
||||
class ToolSpec(BaseModel):
    """Wrapper for an inner tool function."""

    function: Function
    # OAI currently defines only the "function" tool type
    type: Literal["function"]
|
||||
|
||||
|
||||
class Tool(BaseModel):
    """The invoked-function payload inside a tool call: the function's
    name and its arguments."""

    name: str

    # OAI expects a JSON-encoded string here rather than a dict
    # ("Makes more sense to be a dict, but OAI knows best")
    arguments: str
|
||||
|
||||
|
||||
class ToolCall(BaseModel):
    """A single tool call emitted by the model, per the OAI spec."""

    # Unique identifier for this call, echoed back in the tool response
    id: str
    function: Tool
    # OAI currently defines only the "function" tool type
    type: Literal["function"]
|
||||
Reference in New Issue
Block a user