API: Move prompt template construction to utils

Best to move the inner workings into their own function. Also fix
an edge case where stop strings can be a string rather than an array.

Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
kingbri
2024-03-29 02:24:13 -04:00
parent 190a0b26c3
commit e8b6a02aa8
2 changed files with 16 additions and 5 deletions

View File

@@ -130,18 +130,32 @@ def _create_stream_chunk(
def format_prompt_with_template(data: ChatCompletionRequest):
"""
Compile the prompt and get any additional stop strings from the template.
Template stop strings can be overriden by sampler overrides if force is true.
"""
try:
special_tokens_dict = model.container.get_special_tokens(
unwrap(data.add_bos_token, True),
unwrap(data.ban_eos_token, False),
)
return get_prompt_from_template(
prompt, template_stop_strings = get_prompt_from_template(
data.messages,
model.container.prompt_template,
data.add_generation_prompt,
special_tokens_dict,
)
# Append template stop strings
if isinstance(data.stop, str):
data.stop = [data.stop] + template_stop_strings
else:
data.stop += template_stop_strings
return prompt
except KeyError as exc:
raise HTTPException(
400,