fix: bug from weird ollama behavior

This commit is contained in:
martin legrand 2025-03-30 12:00:20 +02:00
parent 61d83e6614
commit 6b31516cd9

View File

@ -26,6 +26,9 @@ class OllamaLLM(GeneratorLLM):
) )
for chunk in stream:
if type(chunk) != dict:
self.logger.error(f"Error: chunk not a dict")
continue
content = chunk['message']['content']
if '.' in content:
self.logger.info(self.state.current_buffer)