Files
tabbyAPI/backends/exllamav2/utils.py
kingbri 6eeb62b82c Requirements: Update exllamav2, torch, and FA2
Torch to 2.2, exllamav2 to 0.0.13, FA2 to 2.4.2 on Windows and 2.5.2
on Linux.

Signed-off-by: kingbri <bdashore3@proton.me>
2024-02-02 23:53:42 -05:00

32 lines
1.1 KiB
Python

from importlib.metadata import PackageNotFoundError, version as package_version

from packaging import version

from common.logger import init_logger
logger = init_logger(__name__)
def check_exllama_version():
    """Verify that the installed ExLlamaV2 meets the minimum required version.

    Raises:
        SystemExit: If the exllamav2 package is not installed, or if the
            installed version is older than ``required_version``. The exit
            message tells the user how to upgrade their environment.
    """

    required_version = "0.0.13"

    try:
        # Strip the local version segment (e.g. "0.0.13+cu121") so only the
        # public release number is compared.
        current_version = package_version("exllamav2").split("+")[0]
    except PackageNotFoundError:
        # Missing package entirely — give the same actionable guidance
        # instead of an unhandled traceback.
        raise SystemExit(
            "ERROR: TabbyAPI requires ExLlamaV2, but it is not installed.\n"
            "Please set up your environment by running a start script "
            "(start.bat or start.sh)"
        ) from None

    if version.parse(current_version) < version.parse(required_version):
        raise SystemExit(
            f"ERROR: TabbyAPI requires ExLlamaV2 {required_version} "
            f"or greater. Your current version is {current_version}.\n"
            "Please upgrade your environment by running a start script "
            "(start.bat or start.sh)\n\n"
            "Or you can manually run a requirements update "
            "using the following command:\n\n"
            "For CUDA 12.1:\n"
            "pip install --upgrade -r requirements.txt\n\n"
            "For CUDA 11.8:\n"
            "pip install --upgrade -r requirements-cu118.txt\n\n"
            "For ROCm:\n"
            "pip install --upgrade -r requirements-amd.txt\n\n"
        )
    else:
        logger.info(f"ExllamaV2 version: {current_version}")