fix : llamacpp handler problem again

This commit is contained in:
martin legrand 2025-03-29 19:47:44 +01:00
parent 5862c98f3e
commit 1dd8e0a016


@@ -29,7 +29,7 @@ class LlamacppLLM(GeneratorLLM):
                 messages = history
             )
             with self.state.lock:
-                self.state.current_buffer = output['choices'][0]['content']
+                self.state.current_buffer = output['choices'][0]['message']['content']
         except Exception as e:
             self.logger.error(f"Error: {e}")
         finally:
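
The fix changes how the handler reads the generated text out of the llama.cpp response. A minimal sketch of the expected structure, assuming the handler wraps llama-cpp-python's Llama.create_chat_completion (the model path and prompt below are placeholders): chat completions follow the OpenAI-style schema, so the text lives under choices[0]['message']['content'] rather than choices[0]['content'].

# Minimal sketch, not the project's actual handler code.
# Assumes llama-cpp-python's Llama.create_chat_completion, which returns
# an OpenAI-style chat completion dict.
from llama_cpp import Llama

llm = Llama(model_path="model.gguf")  # placeholder model path

output = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Hello"}]
)

# Chat completions nest the text under 'message'; plain completions use
# 'text', and a bare 'content' key does not exist in either response.
text = output['choices'][0]['message']['content']
print(text)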