From a8d159771ef58dbee0384c52d4b47bffc684673a Mon Sep 17 00:00:00 2001
From: SkqLiao
Date: Fri, 14 Mar 2025 23:09:37 +0800
Subject: [PATCH] fix flash_attn whl path

Replace the hard-coded local wheel path with the official release URL
from the flash-attention GitHub releases page. The wheel variant stays
cu12torch2.6 to match the torch build installed from the cu126 index
two lines above (flash-attn wheels are ABI-bound to the torch version).
---
 .github/workflows/install.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/install.yml b/.github/workflows/install.yml
index deb1db9..31d37bb 100644
--- a/.github/workflows/install.yml
+++ b/.github/workflows/install.yml
@@ -40,7 +40,7 @@ jobs:
         conda activate ktransformers-dev
         pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu126
         pip3 install packaging ninja cpufeature numpy
-        pip install ~/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiTRUE-cp311-cp311-linux_x86_64.whl
+        pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiTRUE-cp311-cp311-linux_x86_64.whl
     - name: Install KTransformers
       run: |
         source /home/qujing3/anaconda3/etc/profile.d/conda.sh