mirror of
https://github.com/turboderp-org/exui.git
synced 2026-04-20 14:29:52 +00:00
Add Phind-CodeLlama format
This commit is contained in:
@@ -147,13 +147,44 @@ class PromptFormat_tinyllama(PromptFormat_chatml):
|
||||
return False
|
||||
|
||||
|
||||
class PromptFormat_phind_codellama(PromptFormat):

    # Vicuna/Alpaca-like sectioned instruct format used by Phind-CodeLlama.
    description = "Vicuna/Alpaca-like format for Phind-CodeLlama"

    def __init__(self):
        super().__init__()

    def is_instruct(self):
        """Phind-CodeLlama is an instruct-tuned model."""
        return True

    def stop_conditions(self, tokenizer, settings):
        """Stop generating at EOS or when the model starts a new '### ' section header."""
        return [tokenizer.eos_token_id, "\n### "]

    def format(self, prompt, response, system_prompt, settings):
        """Assemble one turn: optional system section, user section, then the
        assistant header (plus the response text, if one is given)."""
        parts = []
        # Only emit the system section when a non-blank system prompt exists.
        if system_prompt and system_prompt.strip() != "":
            parts.append("### System Prompt\n")
            parts.append(system_prompt)
            parts.append("\n\n")
        parts.append("### User Message\n")
        parts.append(prompt)
        parts.append("\n\n### Assistant\n")
        if response:
            parts.append(response)
            parts.append("\n\n")
        return "".join(parts)
|
||||
|
||||
|
||||
# Registry mapping UI display names to their prompt-format classes.
# NOTE: the flattened diff view left both the old (comma-less) and new
# "MistralLite" entries in place; keeping both is a syntax error, so the
# stale duplicate is removed here.
prompt_formats = \
{
    "Chat-RP": PromptFormat_raw,
    "Llama-chat": PromptFormat_llama,
    "ChatML": PromptFormat_chatml,
    "TinyLlama-chat": PromptFormat_tinyllama,
    "MistralLite": PromptFormat_mistrallite,
    "Phind-CodeLlama": PromptFormat_phind_codellama
}
|
||||
|
||||
def list_prompt_formats():
|
||||
|
||||
Reference in New Issue
Block a user