mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-04-25 16:59:09 +00:00
Dependencies: Update torch, exllamav2, and flash-attn
Torch 2.6.0, ExllamaV2 0.2.8, Flash-attn 2.7.4.post1. CUDA wheels are now 12.4 instead of 12.1; feature names need to be migrated over. Signed-off-by: kingbri <8082010+kingbri1@users.noreply.github.com>
This commit is contained in:
@@ -52,8 +52,6 @@ def supports_paged_attn():
            "using the following command:\n\n"
            "For CUDA 12.1:\n"
            "pip install --upgrade .[cu121]\n\n"
            "For CUDA 11.8:\n"
            "pip install --upgrade .[cu118]\n\n"
            "NOTE: Windows users must use CUDA 12.x to use flash-attn."
        )
(NOTE: the +/- change markers for this hunk were lost during page extraction; lines above are shown without them.)
@@ -16,8 +16,6 @@ def check_exllama_version():
            "using the following command:\n\n"
            "For CUDA 12.1:\n"
            "pip install --upgrade .[cu121]\n\n"
            "For CUDA 11.8:\n"
            "pip install --upgrade .[cu118]\n\n"
            "For ROCm:\n"
            "pip install --upgrade .[amd]\n\n"
        )
(NOTE: the +/- change markers for this hunk were lost during page extraction; lines above are shown without them.)
@@ -25,7 +23,7 @@ def check_exllama_version():
    if not dependencies.exllamav2:
        raise SystemExit(("Exllamav2 is not installed.\n" + install_message))

-   required_version = version.parse("0.2.3")
+   required_version = version.parse("0.2.8")
    current_version = version.parse(package_version("exllamav2").split("+")[0])

    unsupported_message = (
Reference in New Issue
Block a user