From 9812d57c1196856f9f6c526d0ad0191cad403f6b Mon Sep 17 00:00:00 2001 From: SkqLiao Date: Sat, 15 Mar 2025 02:31:49 +0800 Subject: [PATCH] fix typo, logging to file --- .github/workflows/install.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/install.yml b/.github/workflows/install.yml index d8f199a..c16dda8 100644 --- a/.github/workflows/install.yml +++ b/.github/workflows/install.yml @@ -60,6 +60,11 @@ jobs: export LD_LIBRARY_PATH=/usr/local/cuda-12.4/lib64:$LD_LIBRARY_PATH export CUDA_HOME=/usr/local/cuda-12.4 cd ${{ github.workspace }} - python ktransformers/local_chat.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 100 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/promptsbook.txt - DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 100 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/chinese.txt + echo "Running Local Chat 1" + python ktransformers/local_chat.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/book.txt > log1.txt + sed -n '/Prompt:/,$p' log1.txt + echo "Running Local Chat 2" + python ktransformers/local_chat.py --model_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/config --gguf_path /home/qujing3/models/DeepSeek-R1-Q4_K_M/ --max_new_tokens 256 --cache_len 1536 --cpu_infer 64 --prompt_file /home/qujing3/prompts/chinese.txt > log2.txt + sed -n '/Prompt:/,$p' log2.txt + - run: echo "This job's status is ${{ job.status }}."