From 69f276955ad0ce76db0f070aa66790d5815b497e Mon Sep 17 00:00:00 2001
From: martin legrand
Date: Sun, 30 Mar 2025 12:04:06 +0200
Subject: [PATCH] fix: ollama server issue

---
 server/sources/ollama_handler.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/server/sources/ollama_handler.py b/server/sources/ollama_handler.py
index 2ff130a..f9028a1 100644
--- a/server/sources/ollama_handler.py
+++ b/server/sources/ollama_handler.py
@@ -24,18 +24,14 @@ class OllamaLLM(GeneratorLLM):
                 messages=history,
                 stream=True,
             )
-
             for chunk in stream:
-                if type(chunk) != dict:
-                    self.logger.error(f"Error: chunk not a dict")
-                    continue
                 content = chunk['message']['content']
-                if '.' in content:
-                    self.logger.info(self.state.current_buffer)
-                    self.state.last_complete_sentence = self.state.current_buffer
+                if '\n' in content:
+                    self.logger.info(content)
 
                 with self.state.lock:
                     self.state.current_buffer += content
+
         except Exception as e:
             if "404" in str(e):
                 self.logger.info(f"Downloading {self.model}...")