mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-03-14 15:57:27 +00:00
Model: Add prompt logging to ExllamaV3
Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
@@ -28,6 +28,7 @@ from common.concurrency import iterate_in_threadpool
 from common.gen_logging import (
     log_generation_params,
     log_metrics,
+    log_prompt,
 )
 from common.hardware import hardware_supports_flash_attn
 from common.health import HealthManager
@@ -840,6 +841,12 @@ class ExllamaV3Container(BaseModelContainer):
                 f"max_seq_len {self.max_seq_len}"
             )

+        # Log prompt to console. Add the BOS token if specified
+        log_prompt(
+            f"{self.tokenizer.bos_token if add_bos_token else ''}{prompt}",
+            request_id,
+        )
+
         generation = {}
         job = AsyncJob(
             self.generator,
@@ -29,7 +29,7 @@ def log_generation_params(**kwargs):
     logger.info(f"Generation options: {kwargs}\n")


-def log_prompt(prompt: str, request_id: str, negative_prompt: Optional[str]):
+def log_prompt(prompt: str, request_id: str, negative_prompt: Optional[str] = None):
     """Logs the prompt to console."""
     if config.logging.log_prompt:
         formatted_prompt = "\n" + prompt
Reference in New Issue
Block a user