From fd836a08f7cfb27a91f0707d0979afaa38698aa9 Mon Sep 17 00:00:00 2001
From: Josh XT
Date: Thu, 11 Jul 2024 13:17:50 -0400
Subject: [PATCH] update llamacpp and diffusers

---
 cuda-requirements.txt | 2 +-
 cuda.Dockerfile       | 2 +-
 requirements.txt      | 3 ++-
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/cuda-requirements.txt b/cuda-requirements.txt
index 717b71b..008b57f 100644
--- a/cuda-requirements.txt
+++ b/cuda-requirements.txt
@@ -6,7 +6,7 @@ requests==2.32.0
 tiktoken==0.6.0
 python-dotenv==1.0.1
 beautifulsoup4==4.12.3
-faster-whisper==1.0.2
+faster-whisper==1.0.3
 pydub==0.25.1
 ffmpeg-python==0.2.0
 torch==2.3.1+cu121
diff --git a/cuda.Dockerfile b/cuda.Dockerfile
index 97c2d92..242361d 100644
--- a/cuda.Dockerfile
+++ b/cuda.Dockerfile
@@ -13,7 +13,7 @@ ENV HOST=0.0.0.0 \
     LLAMA_CUBLAS=1
 COPY cuda-requirements.txt .
 RUN python3 -m pip install --upgrade pip cmake scikit-build setuptools wheel --no-cache-dir && \
-    CMAKE_ARGS="-DLLAMA_CUDA=on" FORCE_CMAKE=1 pip install llama-cpp-python==0.2.79 --no-cache-dir && \
+    CMAKE_ARGS="-DLLAMA_CUDA=on" FORCE_CMAKE=1 pip install llama-cpp-python==0.2.82 --no-cache-dir && \
     pip install --no-cache-dir -r cuda-requirements.txt
 RUN git clone https://github.com/Josh-XT/DeepSeek-VL deepseek && \
     cd deepseek && \
diff --git a/requirements.txt b/requirements.txt
index c68c9d5..5e4933e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -24,4 +24,5 @@ openai
 pdfplumber
 optimum
 onnx
-llama-cpp-python==0.2.79
\ No newline at end of file
+diffusers[torch]
+llama-cpp-python==0.2.82
\ No newline at end of file