From 0c3a07f208880188a2f0cf2011b9479cc7bcae33 Mon Sep 17 00:00:00 2001
From: martin legrand
Date: Sat, 29 Mar 2025 13:32:46 +0100
Subject: [PATCH] fix : server problem with llamacpp

---
 server/sources/llamacpp.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/server/sources/llamacpp.py b/server/sources/llamacpp.py
index 76252ec..d1625ad 100644
--- a/server/sources/llamacpp.py
+++ b/server/sources/llamacpp.py
@@ -18,6 +18,7 @@ class LlamacppLLM(GeneratorLLM):
                 filename="*q8_0.gguf",
                 verbose=True
             )
+            return
         self.logger.info(f"Using {self.model} for generation with Llama.cpp")
         self.llm.create_chat_completion(
             messages = history
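
For context, below is a minimal sketch of the control flow this one-line change appears to produce: the model is lazy-loaded on the first call and the method returns early (the added `return`), so the chat completion only runs on later calls once the weights are in place. It assumes llama-cpp-python's `Llama.from_pretrained` and `create_chat_completion` APIs; the surrounding `generate()` structure, the `LlamacppSketch` class name, and the model identifier are assumptions for illustration, not part of the patch.

```python
# Sketch only: the real class derives from GeneratorLLM in server/sources,
# which is not shown in this patch.
from llama_cpp import Llama

class LlamacppSketch:
    def __init__(self, model="Qwen/Qwen2.5-1.5B-Instruct-GGUF"):  # model id is hypothetical
        self.model = model
        self.llm = None

    def generate(self, history):
        if self.llm is None:
            # First call: download/load the GGUF weights, then return early
            # (the line added by the patch) instead of generating immediately.
            self.llm = Llama.from_pretrained(
                repo_id=self.model,
                filename="*q8_0.gguf",
                verbose=True,
            )
            return None
        # Subsequent calls: the model is loaded, so run the chat completion.
        output = self.llm.create_chat_completion(messages=history)
        return output["choices"][0]["message"]["content"]
```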