diff --git a/config_sample.yml b/config_sample.yml
index dd448f7..dc36241 100644
--- a/config_sample.yml
+++ b/config_sample.yml
@@ -23,6 +23,10 @@ model:
   # A model can be loaded later via the API.
   model_name: A model name
 
+  # Sends dummy model names when the models endpoint is queried
+  # Enable this if the program is looking for a specific OAI model
+  use_dummy_models: False
+
   # The below parameters apply only if model_name is set
 
   # Maximum model context length (default: 4096)
diff --git a/main.py b/main.py
index 8c9b776..c817027 100644
--- a/main.py
+++ b/main.py
@@ -60,6 +60,8 @@ async def list_models():
     draft_model_dir = draft_config.get("draft_model_dir")
 
     models = get_model_list(model_path.resolve(), draft_model_dir)
 
+    if model_config.get("use_dummy_models") or False:
+        models.data.insert(0, ModelCard(id = "gpt-3.5-turbo"))
     return models
 