From 3461f8294faf613f013834d95660420ec8ee687b Mon Sep 17 00:00:00 2001
From: kingbri
Date: Sat, 23 Dec 2023 20:58:50 -0500
Subject: [PATCH] Logging: Clarify preferences

Preferences are preferences, not a config.

Signed-off-by: kingbri
---
 OAI/types/model.py |  4 ++--
 gen_logging.py     | 18 +++++++++---------
 main.py            |  2 +-
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/OAI/types/model.py b/OAI/types/model.py
index 3721477..0951ebd 100644
--- a/OAI/types/model.py
+++ b/OAI/types/model.py
@@ -4,7 +4,7 @@
 from typing import List, Optional
 from pydantic import BaseModel, Field, ConfigDict
 
-from gen_logging import LogConfig
+from gen_logging import LogPreferences
 
 
 class ModelCardParameters(BaseModel):
@@ -28,7 +28,7 @@ class ModelCard(BaseModel):
     object: str = "model"
     created: int = Field(default_factory=lambda: int(time()))
     owned_by: str = "tabbyAPI"
-    logging: Optional[LogConfig] = None
+    logging: Optional[LogPreferences] = None
     parameters: Optional[ModelCardParameters] = None
 
 
diff --git a/gen_logging.py b/gen_logging.py
index bd35c97..0ad5abb 100644
--- a/gen_logging.py
+++ b/gen_logging.py
@@ -9,7 +9,7 @@ from logger import init_logger
 logger = init_logger(__name__)
 
 
-class LogConfig(BaseModel):
+class LogPreferences(BaseModel):
     """Logging preference config."""
 
     prompt: bool = False
@@ -17,28 +17,28 @@
 
 
 # Global reference to logging preferences
-CONFIG = LogConfig()
+PREFERENCES = LogPreferences()
 
 
 def update_from_dict(options_dict: Dict[str, bool]):
     """Wrapper to set the logging config for generations"""
-    global CONFIG
+    global PREFERENCES
 
     # Force bools on the dict
     for value in options_dict.values():
         if value is None:
             value = False
 
-    CONFIG = LogConfig.model_validate(options_dict)
+    PREFERENCES = LogPreferences.model_validate(options_dict)
 
 
 def broadcast_status():
     """Broadcasts the current logging status"""
     enabled = []
-    if CONFIG.prompt:
+    if PREFERENCES.prompt:
         enabled.append("prompts")
 
-    if CONFIG.generation_params:
+    if PREFERENCES.generation_params:
         enabled.append("generation params")
 
     if len(enabled) > 0:
@@ -49,19 +49,19 @@ def broadcast_status():
 
 def log_generation_params(**kwargs):
     """Logs generation parameters to console."""
-    if CONFIG.generation_params:
+    if PREFERENCES.generation_params:
         logger.info(f"Generation options: {kwargs}\n")
 
 
 def log_prompt(prompt: str):
     """Logs the prompt to console."""
-    if CONFIG.prompt:
+    if PREFERENCES.prompt:
         formatted_prompt = "\n" + prompt
         logger.info(f"Prompt: {formatted_prompt if prompt else 'Empty'}\n")
 
 
 def log_response(response: str):
     """Logs the response to console."""
-    if CONFIG.prompt:
+    if PREFERENCES.prompt:
         formatted_response = "\n" + response
         logger.info(f"Response: {formatted_response if response else 'Empty'}\n")
diff --git a/main.py b/main.py
index 6d5d304..60589a2 100644
--- a/main.py
+++ b/main.py
@@ -108,7 +108,7 @@ async def get_current_model():
             cache_mode="FP8" if MODEL_CONTAINER.cache_fp8 else "FP16",
             prompt_template=prompt_template.name if prompt_template else None,
         ),
-        logging=gen_logging.CONFIG,
+        logging=gen_logging.PREFERENCES,
     )
 
     if MODEL_CONTAINER.draft_config: