mirror of
https://github.com/theroyallab/tabbyAPI.git
synced 2026-05-11 08:20:08 +00:00
Dependencies: Pin correct xformers version torch 2.9
This commit is contained in:
@@ -165,7 +165,7 @@ These are short-form instructions for other methods that users can use to install
 > [!NOTE]
 > If you are planning to use custom versions of dependencies such as dev ExllamaV2, make sure to build the Docker image yourself!
 
-1. Install Docker and docker compose from the [docs](https://docs.docker.com/compose/install/
+1. Install Docker and docker compose from the [docs](https://docs.docker.com/compose/install/)
 2. Install the Nvidia container compatibility layer
    1. For Linux: [Nvidia container toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)
    2. For Windows: [Cuda Toolkit on WSL](https://docs.nvidia.com/cuda/wsl-user-guide/index.html)
@@ -72,8 +72,9 @@ cu12 = [
     "triton ; platform_system == 'Linux' and platform_machine == 'x86_64'",
     "triton-windows ; platform_system == 'Windows'",
 
-    # xformers
-    "xformers",
+    # xformers (cu128 wheels from the PyTorch index)
+    "xformers @ https://download.pytorch.org/whl/cu128/xformers-0.0.33-cp39-abi3-manylinux_2_28_x86_64.whl ; platform_system == 'Linux' and platform_machine == 'x86_64'",
+    "xformers @ https://download.pytorch.org/whl/cu128/xformers-0.0.33-cp39-abi3-win_amd64.whl ; platform_system == 'Windows'",
 
     # Exl2
     "exllamav2 @ https://github.com/turboderp-org/exllamav2/releases/download/v0.3.2/exllamav2-0.3.2+cu128.torch2.9.0-cp313-cp313-win_amd64.whl ; platform_system == 'Windows' and python_version == '3.13'",
Reference in New Issue
Block a user