From bb8f61924a297c939e2daaa5d721c477c3804b3c Mon Sep 17 00:00:00 2001
From: "Jonathan C. McKinney"
Date: Sun, 25 Aug 2024 13:18:54 -0700
Subject: [PATCH] For Issue #1812

---
 docs/linux_install.sh | 2 +-
 src/version.py        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/linux_install.sh b/docs/linux_install.sh
index aa4c5e47d..b92430a7f 100755
--- a/docs/linux_install.sh
+++ b/docs/linux_install.sh
@@ -199,7 +199,7 @@ if [[ "${PIP_EXTRA_INDEX_URL}" == *"cu118"* ]]; then
     pip install auto-gptq==0.7.1 --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
     echo "cuda118 for awq, see: https://github.com/casper-hansen/AutoAWQ_kernels/releases/"
-else
+elif [[ -v CUDA_HOME ]]; then
     #* GPU Optional: For exllama support on x86_64 linux
     #pip uninstall -y exllama ; pip install https://github.com/jllllll/exllama/releases/download/0.0.18/exllama-0.0.18+cu121-cp310-cp310-linux_x86_64.whl --no-cache-dir -c reqs_optional/reqs_constraints.txt
     # See [exllama](README_GPU.md#exllama) about running exllama models.
diff --git a/src/version.py b/src/version.py
index 905ad060a..4a60edd51 100644
--- a/src/version.py
+++ b/src/version.py
@@ -1 +1 @@
-__version__ = "61f362b3d7cf90f25a3f2787f4da8bc6297ff5ee"
+__version__ = "4e2b2421998b1f7cd0a93a2b579fdb41cfaa5c90"