From 5c9ada9468b4d8b663d611776f62f61d5de12961 Mon Sep 17 00:00:00 2001
From: martin legrand
Date: Sat, 29 Mar 2025 13:55:01 +0100
Subject: [PATCH] fix : bug

---
 server/install.sh          | 5 +++++
 server/sources/llamacpp.py | 2 +-
 sources/llm_provider.py    | 1 -
 3 files changed, 6 insertions(+), 2 deletions(-)
 create mode 100644 server/install.sh

diff --git a/server/install.sh b/server/install.sh
new file mode 100644
index 0000000..97658be
--- /dev/null
+++ b/server/install.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+pip3 install --upgrade packaging
+pip3 install --upgrade pip setuptools
+pip3 install -r requirements.txt
\ No newline at end of file
diff --git a/server/sources/llamacpp.py b/server/sources/llamacpp.py
index 1e4aede..49e4ceb 100644
--- a/server/sources/llamacpp.py
+++ b/server/sources/llamacpp.py
@@ -15,7 +15,7 @@ class LlamacppLLM(GeneratorLLM):
         if self.llm is None:
             self.llm = Llama.from_pretrained(
                 repo_id=self.model,
-                filename="*q8_0.gguf",
+                filename="*Q8_0.gguf",
                 verbose=True
             )
             return
diff --git a/sources/llm_provider.py b/sources/llm_provider.py
index 4dc3649..fffde94 100644
--- a/sources/llm_provider.py
+++ b/sources/llm_provider.py
@@ -115,7 +115,6 @@ class Provider:
 
         try:
             requests.post(route_setup, json={"model": self.model})
-            pretty_print("Setting up server...", color="status")
             requests.post(route_gen, json={"messages": history})
             is_complete = False
             while not is_complete: