diff --git a/server/sources/llamacpp.py b/server/sources/llamacpp.py
index 76252ec..d1625ad 100644
--- a/server/sources/llamacpp.py
+++ b/server/sources/llamacpp.py
@@ -18,6 +18,7 @@ class LlamacppLLM(GeneratorLLM):
                 filename="*q8_0.gguf",
                 verbose=True
             )
+            return
         self.logger.info(f"Using {self.model} for generation with Llama.cpp")
         self.llm.create_chat_completion(
             messages = history