Dependencies: Remove requirements-*.txt files

pyproject.toml replaces these files.

Signed-off-by: kingbri <bdashore3@proton.me>
Author: kingbri
Date: 2024-03-20 00:47:53 -04:00
Committed by: Brian Dashore
Parent: 72b08624a3
Commit: 1059101b23
5 changed files with 0 additions and 101 deletions
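
For orientation, here is a minimal sketch of the shape such a pyproject.toml could take: shared dependencies in the base table, plus one extra per hardware variant. The extra names (amd, cu118, dev, cu121) and the overall layout are illustrative assumptions, not a claim about the file this commit actually introduced:

# Hypothetical pyproject.toml skeleton (names assumed)
[project]
# ...project metadata elided...
dependencies = []   # the shared pip deps (the wheel-free file below)

[project.optional-dependencies]
amd = []     # ROCm 5.6 wheel set (first file below)
cu118 = []   # CUDA 11.8 wheel set (second file below)
dev = []     # lint tooling (third file below)
cu121 = []   # CUDA 12.1 wheel set (last file below)

Filled-in sketches of each table follow the corresponding file below.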

@@ -1,17 +0,0 @@
# Torch
--extra-index-url https://download.pytorch.org/whl/rocm5.6
torch ~= 2.2
# Exllamav2
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp311-cp311-linux_x86_64.whl; python_version == "3.11"
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp310-cp310-linux_x86_64.whl; python_version == "3.10"
# Pip dependencies
fastapi
pydantic >= 2.0.0
PyYAML
rich
uvicorn
jinja2 >= 3.0.0
loguru
sse-starlette
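
In pyproject.toml terms, this file could become an extra made of PEP 508 direct references, as in the sketch below (the extra name amd is an assumption). One caveat: --extra-index-url has no PEP 621 equivalent, so the ROCm torch index must still be passed to pip at install time, e.g. pip install .[amd] --extra-index-url https://download.pytorch.org/whl/rocm5.6.

[project.optional-dependencies]
amd = [
    "torch ~= 2.2",
    # Direct wheel references keep their environment markers unchanged:
    "exllamav2 @ https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp311-cp311-linux_x86_64.whl ; python_version == '3.11'",
    "exllamav2 @ https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+rocm5.6-cp310-cp310-linux_x86_64.whl ; python_version == '3.10'",
]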

@@ -1,27 +0,0 @@
# Torch
--extra-index-url https://download.pytorch.org/whl/cu118
torch ~= 2.2
# Exllamav2
# Windows
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
# Linux
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
# Pip dependencies
fastapi
pydantic >= 2.0.0
PyYAML
rich
uvicorn
jinja2 >= 3.0.0
loguru
sse-starlette
# Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu118torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
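
The platform_system / platform_machine / python_version conditions above are standard PEP 508 environment markers: pip evaluates them against the running interpreter and installs only the matching wheel URL. They carry over to a pyproject.toml extra one-for-one, as in this abridged sketch (the extra name cu118 is an assumption):

[project.optional-dependencies]
cu118 = [
    "torch ~= 2.2",
    "exllamav2 @ https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu118-cp311-cp311-linux_x86_64.whl ; platform_system == 'Linux' and platform_machine == 'x86_64' and python_version == '3.11'",
    # The Windows wheels, cp310 variants, and Linux flash_attn wheels
    # all follow the same "name @ url ; marker" pattern.
]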

@@ -1,15 +0,0 @@
# formatting
ruff==0.3.2
## Implement below dependencies when support is added
# type checking
# mypy==0.991
# types-PyYAML
# types-requests
# types-setuptools
# testing
# pytest
# pytest-forked
# pytest-asyncio
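
The tooling file maps over most directly of all; a sketch assuming a dev extra:

[project.optional-dependencies]
dev = [
    "ruff == 0.3.2",
    # mypy, the types-* stubs, and the pytest family can join this list
    # later, mirroring the commented-out entries above.
]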

@@ -1,9 +0,0 @@
# Pip dependencies
fastapi
pydantic >= 2.0.0
PyYAML
rich
uvicorn
jinja2 >= 3.0.0
loguru
sse-starlette
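
This wheel-free list is the common denominator of every variant above, which makes it the natural candidate for the base dependencies table rather than an extra. A sketch:

[project]
dependencies = [
    "fastapi",
    "pydantic >= 2.0.0",
    "PyYAML",
    "rich",
    "uvicorn",
    "jinja2 >= 3.0.0",
    "loguru",
    "sse-starlette",
]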

@@ -1,33 +0,0 @@
# Torch
--extra-index-url https://download.pytorch.org/whl/cu121
torch ~= 2.2
# Exllamav2
# Windows
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
# Linux
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
# Pip dependencies
fastapi
pydantic >= 2.0.0
PyYAML
rich
uvicorn
jinja2 >= 3.0.0
loguru
sse-starlette
# Flash attention v2
# Windows FA2 from https://github.com/bdashore3/flash-attention/releases
https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
# Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
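
The CUDA 12.1 set follows the same pattern, including the prebuilt flash-attention wheels; an abridged sketch with the extra name cu121 assumed. After the migration, pip install .[cu121] --extra-index-url https://download.pytorch.org/whl/cu121 replaces installing from this file:

[project.optional-dependencies]
cu121 = [
    "torch ~= 2.2",
    "exllamav2 @ https://github.com/turboderp/exllamav2/releases/download/v0.0.15/exllamav2-0.0.15+cu121-cp311-cp311-win_amd64.whl ; platform_system == 'Windows' and python_version == '3.11'",
    "flash_attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl ; platform_system == 'Linux' and platform_machine == 'x86_64' and python_version == '3.11'",
    # Remaining wheels (cp310 variants, Windows flash_attn) carry over the same way.
]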