Mirror of https://github.com/theroyallab/tabbyAPI.git, synced 2026-04-28 10:11:39 +00:00
Dependencies: Remove requirements-*.txt files

Pyproject.toml replaces these files.

Signed-off-by: kingbri <bdashore3@proton.me>
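For orientation, here is a minimal sketch of the pyproject.toml side of this migration, assuming a PEP 621 [project] table (the name and version below are placeholders, not taken from this diff; the dependency list is copied from the deleted files):

[project]
name = "tabbyAPI"    # placeholder; actual metadata is not shown in this diff
version = "0.0.0"    # placeholder
dependencies = [
    "fastapi",
    "pydantic >= 2.0.0",
    "PyYAML",
    "rich",
    "uvicorn",
    "jinja2 >= 3.0.0",
    "loguru",
    "sse-starlette",
]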
@@ -1,17 +0,0 @@
-# Torch
---extra-index-url https://download.pytorch.org/whl/rocm5.6
-torch ~= 2.2
-
-# Exllamav2
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp311-cp311-linux_x86_64.whl; python_version == "3.11"
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp310-cp310-linux_x86_64.whl; python_version == "3.10"
-
-# Pip dependencies
-fastapi
-pydantic >= 2.0.0
-PyYAML
-rich
-uvicorn
-jinja2 >= 3.0.0
-loguru
-sse-starlette
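One wrinkle in this migration: `--extra-index-url` is a pip resolver flag, not a dependency, so PEP 621 metadata cannot carry it. A hedged sketch of how the ROCm torch pin above might map over (the `amd` extra name is an assumption; the torch constraint is from the deleted file):

[project.optional-dependencies]
# "amd" is an assumed extra name, not confirmed by this diff.
amd = [
    "torch ~= 2.2",
]
# The ROCm wheel index cannot be declared in this table; it has to be
# supplied at install time, for example via
# PIP_EXTRA_INDEX_URL=https://download.pytorch.org/whl/rocm5.6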
@@ -1,27 +0,0 @@
-# Torch
---extra-index-url https://download.pytorch.org/whl/cu118
-torch ~= 2.2
-
-# Exllamav2
-
-# Windows
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-
-# Linux
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-
-# Pip dependencies
-fastapi
-pydantic >= 2.0.0
-PyYAML
-rich
-uvicorn
-jinja2 >= 3.0.0
-loguru
-sse-starlette
-
-# Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu118torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
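Bare wheel URLs like the ones above are valid in a requirements file but not in a PEP 621 dependency list, which needs PEP 508 direct references of the form `name @ url`; the environment markers carry over unchanged. A hedged sketch using two of the deleted lines (the `cu118` extra name is an assumption; URLs and markers are copied from the file above, 3.11/Linux entries only for brevity):

[project.optional-dependencies]
# "cu118" is an assumed extra name, not confirmed by this diff.
cu118 = [
    'exllamav2 @ https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp311-cp311-linux_x86_64.whl ; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"',
    'flash_attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu118torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl ; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"',
]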
@@ -1,15 +0,0 @@
-# formatting
-ruff==0.3.2
-
-## Implement below dependencies when support is added
-
-# type checking
-# mypy==0.991
-# types-PyYAML
-# types-requests
-# types-setuptools
-
-# testing
-# pytest
-# pytest-forked
-# pytest-asyncio
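Tooling pins like these would typically land in a dev extra in pyproject.toml; a minimal sketch, with the `dev` extra name assumed and the commented-out tools carried over as future work exactly as the deleted file kept them:

[project.optional-dependencies]
# "dev" is an assumed extra name, not confirmed by this diff.
dev = [
    "ruff == 0.3.2",
    # Pending, per the deleted file's comments:
    # "mypy == 0.991", "types-PyYAML", "types-requests", "types-setuptools",
    # "pytest", "pytest-forked", "pytest-asyncio",
]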
@@ -1,9 +0,0 @@
-# Pip dependencies
-fastapi
-pydantic >= 2.0.0
-PyYAML
-rich
-uvicorn
-jinja2 >= 3.0.0
-loguru
-sse-starlette
@@ -1,33 +0,0 @@
-# Torch
---extra-index-url https://download.pytorch.org/whl/cu121
-torch ~= 2.2
-
-# Exllamav2
-
-# Windows
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-
-# Linux
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-
-# Pip dependencies
-fastapi
-pydantic >= 2.0.0
-PyYAML
-rich
-uvicorn
-jinja2 >= 3.0.0
-loguru
-sse-starlette
-
-# Flash attention v2
-
-# Windows FA2 from https://github.com/bdashore3/flash-attention/releases
-https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-
-# Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"