fix : llamacpp handler problem

martin legrand 2025-03-29 19:41:29 +01:00
parent ddb533a255
commit 5862c98f3e


@@ -29,7 +29,7 @@ class LlamacppLLM(GeneratorLLM):
                 messages = history
             )
             with self.state.lock:
-                self.state.current_buffer = output
+                self.state.current_buffer = output['choices'][0]['content']
         except Exception as e:
             self.logger.error(f"Error: {e}")
         finally:
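
For context, the hunk changes what gets written into the shared buffer: instead of stashing the raw response object, the handler now extracts the generated text from it. A minimal sketch of the surrounding method, reconstructed from the hunk alone, could look like the following; only the diffed lines come from the commit, while the method name, the self.llm.chat placeholder call, and the finally body are assumptions for illustration.

# Hypothetical reconstruction of LlamacppLLM.generate around the changed line.
# Everything outside the diff hunk is an assumption, not the project's code.
def generate(self, history):
    try:
        output = self.llm.chat(   # placeholder: the real call sits above the hunk
            messages = history
        )
        with self.state.lock:
            # The fix: store only the generated text, not the whole response dict.
            self.state.current_buffer = output['choices'][0]['content']
    except Exception as e:
        self.logger.error(f"Error: {e}")
    finally:
        pass  # the finally body is below the hunk and not shown in this commit

Keeping only the text in state.current_buffer lets whatever reads the buffer treat it as a plain string instead of having to unwrap the backend's response structure itself.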