mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-04-28 02:01:24 +00:00
@@ -8,6 +8,7 @@ logger = init_logger(__name__)
|
|||||||
|
|
||||||
GLOBAL_CONFIG: dict = {}
|
GLOBAL_CONFIG: dict = {}
|
||||||
|
|
||||||
|
|
||||||
def read_config_from_file(config_path: pathlib.Path):
|
def read_config_from_file(config_path: pathlib.Path):
|
||||||
"""Sets the global config from a given file path"""
|
"""Sets the global config from a given file path"""
|
||||||
global GLOBAL_CONFIG
|
global GLOBAL_CONFIG
|
||||||
@@ -23,24 +24,29 @@ def read_config_from_file(config_path: pathlib.Path):
|
|||||||
)
|
)
|
||||||
GLOBAL_CONFIG = {}
|
GLOBAL_CONFIG = {}
|
||||||
|
|
||||||
|
|
||||||
def get_model_config():
    """Returns the model config from the global config"""
    # Fall back to an empty dict when the "model" section is absent.
    model_section = GLOBAL_CONFIG.get("model")
    return unwrap(model_section, {})
|
||||||
|
|
||||||
|
|
||||||
def get_draft_model_config():
    """Returns the draft model config from the global config"""
    # The draft settings live under the "model" section; both levels
    # default to an empty dict when missing.
    model_section = unwrap(GLOBAL_CONFIG.get("model"), {})
    draft_section = model_section.get("draft")
    return unwrap(draft_section, {})
|
||||||
|
|
||||||
|
|
||||||
def get_lora_config():
    """Returns the lora config from the global config"""
    # Lora settings are nested under the "model" section; both levels
    # default to an empty dict when missing.
    model_section = unwrap(GLOBAL_CONFIG.get("model"), {})
    lora_section = model_section.get("lora")
    return unwrap(lora_section, {})
|
||||||
|
|
||||||
|
|
||||||
def get_network_config():
    """Returns the network config from the global config"""
    # Missing "network" section yields an empty dict.
    network_section = GLOBAL_CONFIG.get("network")
    return unwrap(network_section, {})
|
||||||
|
|
||||||
|
|
||||||
def get_gen_logging_config():
    """Returns the generation logging config from the global config"""
    # Missing "logging" section yields an empty dict.
    logging_section = GLOBAL_CONFIG.get("logging")
    return unwrap(logging_section, {})
|
||||||
|
|||||||
2
main.py
2
main.py
@@ -19,7 +19,7 @@ from config import (
|
|||||||
get_model_config,
|
get_model_config,
|
||||||
get_draft_model_config,
|
get_draft_model_config,
|
||||||
get_lora_config,
|
get_lora_config,
|
||||||
get_network_config
|
get_network_config,
|
||||||
)
|
)
|
||||||
from generators import call_with_semaphore, generate_with_semaphore
|
from generators import call_with_semaphore, generate_with_semaphore
|
||||||
from model import ModelContainer
|
from model import ModelContainer
|
||||||
|
|||||||
4
model.py
4
model.py
@@ -523,9 +523,7 @@ class ModelContainer:
|
|||||||
"installed ExLlamaV2 version."
|
"installed ExLlamaV2 version."
|
||||||
)
|
)
|
||||||
|
|
||||||
if (unwrap(kwargs.get("top_a"), False)) and not hasattr (
|
if (unwrap(kwargs.get("top_a"), False)) and not hasattr(gen_settings, "top_a"):
|
||||||
gen_settings, "top_a"
|
|
||||||
):
|
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Top-A is not supported by the currently "
|
"Top-A is not supported by the currently "
|
||||||
"installed ExLlamaV2 version."
|
"installed ExLlamaV2 version."
|
||||||
|
|||||||
Reference in New Issue
Block a user