mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-03-14 15:57:27 +00:00
Templates: Add clarity statements
Lets the user know when a file-not-found error (OSError) occurs and prints the applied template on model load. Also fixes some remaining references to fastchat. Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
7
model.py
7
model.py
@@ -120,11 +120,14 @@ class ModelContainer:
|
||||
self.prompt_template = get_template_from_file(template_name)
|
||||
break
|
||||
except OSError:
|
||||
# Silently set the prompt template to none on a file lookup error
|
||||
# The template couldn't be found in the user's filesystem
|
||||
print(f"Could not find template file with name {prompt_template_name}.jinja")
|
||||
self.prompt_template = None
|
||||
|
||||
# Catch all for template lookup errors
|
||||
if self.prompt_template is None:
|
||||
if self.prompt_template:
|
||||
print(f"Using template {self.prompt_template.name} for chat completions.")
|
||||
else:
|
||||
print("Chat completions are disabled because a prompt template wasn't provided or auto-detected.")
|
||||
|
||||
# Set num of experts per token if provided
|
||||
|
||||
@@ -14,10 +14,10 @@ class PromptTemplate(BaseModel):
|
||||
def get_prompt_from_template(messages, prompt_template: PromptTemplate, add_generation_prompt: bool):
|
||||
if version.parse(package_version("jinja2")) < version.parse("3.0.0"):
|
||||
raise ImportError(
|
||||
"Parsing these chat completion messages requires fastchat 0.2.23 or greater. "
|
||||
"Parsing these chat completion messages requires jinja2 3.0.0 or greater. "
|
||||
f"Current version: {version('jinja2')}\n"
|
||||
"Please upgrade fastchat by running the following command: "
|
||||
"pip install -U fschat[model_worker]"
|
||||
"Please upgrade jinja by running the following command: "
|
||||
"pip install --upgrade jinja2"
|
||||
)
|
||||
|
||||
compiled_template = _compile_template(prompt_template.template)
|
||||
|
||||
Reference in New Issue
Block a user