Files
ktransformers/.github/workflows/install.yml
2025-03-15 03:11:26 +08:00

76 lines
3.5 KiB
YAML

name: Install / Test KTransformers
run-name: Install / Test KTransformers

# Manually triggered only. The single choice input selects which phases run:
# the step-level `if: contains(...)` guards match on substrings of this value
# ("create" -> env setup steps, "install" -> build step; the test step always runs).
on:
  workflow_dispatch:
    inputs:
      job_to_run:
        description: "Which job to run?"
        required: true
        default: "test"
        type: choice
        options:
          - create-install-test
          - install-test
          - test
jobs:
  Install-Test-KTransformers:
    # Self-hosted runner: paths below (/home/qujing3/...) and CUDA 12.4 are
    # machine-specific — this workflow is not portable to GitHub-hosted runners.
    runs-on: self-hosted
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v4
      - run: echo "💡 The ${{ github.repository }} repository has been cloned to the runner."

      - name: Remove old conda environment
        # Best-effort cleanup: the env may not exist on a fresh runner.
        continue-on-error: true
        if: contains(inputs.job_to_run, 'create')
        run: |
          source /home/qujing3/anaconda3/etc/profile.d/conda.sh
          conda env remove --name ktransformers-dev -y

      - name: Create conda environment
        if: contains(inputs.job_to_run, 'create')
        run: |
          source /home/qujing3/anaconda3/etc/profile.d/conda.sh
          # -y is required: without it `conda create` waits on an interactive
          # confirmation prompt and the non-interactive CI step hangs.
          conda create --name ktransformers-dev python=3.11 -y
          conda activate ktransformers-dev
          conda install -c conda-forge libstdcxx-ng -y

      - name: Install dependencies
        if: contains(inputs.job_to_run, 'create')
        run: |
          source /home/qujing3/anaconda3/etc/profile.d/conda.sh
          conda activate ktransformers-dev
          pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu126
          pip3 install packaging ninja cpufeature numpy
          # Prebuilt flash-attn wheel staged on the runner (cu12 / torch 2.6 / cp311).
          pip install ~/flash_attn-2.7.4.post1+cu12torch2.6cxx11abiTRUE-cp311-cp311-linux_x86_64.whl

      - name: Install KTransformers
        if: contains(inputs.job_to_run, 'install')
        run: |
          source /home/qujing3/anaconda3/etc/profile.d/conda.sh
          conda activate ktransformers-dev
          pip3 uninstall ktransformers -y
          cd ${{ github.workspace }}
          git submodule init
          git submodule update
          bash install.sh

      - name: Test Local Chat
        env:
          TERM: xterm-256color
        run: |
          set -e
          source /home/qujing3/anaconda3/etc/profile.d/conda.sh
          conda activate ktransformers-dev
          export PATH=/usr/local/cuda-12.4/bin:$PATH
          export LD_LIBRARY_PATH=/usr/local/cuda-12.4/lib64:$LD_LIBRARY_PATH
          export CUDA_HOME=/usr/local/cuda-12.4
          cd ${{ github.workspace }}
          echo "Running Local Chat 1...(book.txt)"
          python ktransformers/local_chat_test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/book.txt > log1.txt
          # Surface the model output (everything from "Prompt:" onward) in the job log.
          # NOTE(review): exit_code is captured but never checked — an awk failure is
          # silently ignored despite `set -e`; confirm whether that is intentional.
          output=$(awk '/Prompt:/ {found=1} found' log1.txt) || exit_code=$?
          echo "$output"
          echo "Running Local Chat 2...(chinese.txt)"
          python ktransformers/local_chat_test.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/chinese.txt > log2.txt
          output=$(awk '/Prompt:/ {found=1} found' log2.txt) || exit_code=$?
          echo "$output"

      - run: echo "This job's status is ${{ job.status }}."