From 3c66eb646e31aa26d7898aa8fe471549d8fe8449 Mon Sep 17 00:00:00 2001
From: martin legrand
Date: Tue, 8 Apr 2025 19:48:18 +0200
Subject: [PATCH] fix : vllm Nonetype

---
 server/app.py                  | 4 ++--
 server/sources/vllm_handler.py | 4 +++-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/server/app.py b/server/app.py
index b46a314..13817b9 100644
--- a/server/app.py
+++ b/server/app.py
@@ -9,13 +9,13 @@ from sources.ollama_handler import OllamaLLM
 from sources.vllm_handler import Vllm
 
 parser = argparse.ArgumentParser(description='AgenticSeek server script')
-parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama] or [llamacpp]', required=True)
+parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama], [vllm] or [llamacpp]', required=True)
 parser.add_argument('--port', type=int, help='port to use', required=True)
 args = parser.parse_args()
 
 app = Flask(__name__)
 
-assert args.provider in ["ollama", "llamacpp"], f"Provider {args.provider} does not exists. see --help for more information"
+assert args.provider in ["ollama", "llamacpp", "vllm"], f"Provider {args.provider} does not exists. see --help for more information"
 
 handler_map = {
     "ollama": OllamaLLM(),
diff --git a/server/sources/vllm_handler.py b/server/sources/vllm_handler.py
index c3479a3..6fd122b 100644
--- a/server/sources/vllm_handler.py
+++ b/server/sources/vllm_handler.py
@@ -10,7 +10,7 @@ class Vllm(GeneratorLLM):
         """
         super().__init__()
         self.logger = logging.getLogger(__name__)
-        self.llm = LLM(model=self.model)
+        self.llm = None
 
     def convert_history_to_prompt(self, history: List[Dict[str, str]]) -> str:
         """
@@ -37,6 +37,8 @@ class Vllm(GeneratorLLM):
         history: List of dictionaries in OpenAI format [{"role": "user", "content": "..."}, ...]
         """
         self.logger.info(f"Using {self.model} for generation with vLLM")
+        if self.llm is None:
+            self.llm = LLM(model=self.model)
         try:
             with self.state.lock:
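
The change above defers construction of vllm.LLM until the first generation
request. app.py instantiates every handler eagerly when building handler_map,
and at that point the handler's model is presumably still unset (None), so the
old eager LLM(model=self.model) call raised the NoneType error named in the
subject. A minimal standalone sketch of the same lazy-initialization pattern
(the LazyVllm class and its generate() signature are illustrative, not from
the repo):

    from vllm import LLM

    class LazyVllm:
        def __init__(self):
            self.model = None  # assigned later, once a model name is known
            self.llm = None    # deferred: LLM(model=None) would fail here

        def generate(self, prompt: str):
            # Build the engine on first use, after self.model has been set.
            if self.llm is None:
                self.llm = LLM(model=self.model)
            # vllm's LLM.generate takes a list of prompts and returns a list
            # of RequestOutput objects.
            return self.llm.generate([prompt])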