Mirror of https://github.com/tcsenpai/agenticSeek.git (synced 2025-06-05 02:25:27 +00:00)
fix: vllm NoneType
This commit is contained in:
parent 82cf54706b
commit 3c66eb646e
@@ -9,13 +9,13 @@ from sources.ollama_handler import OllamaLLM
 from sources.vllm_handler import Vllm
 
 parser = argparse.ArgumentParser(description='AgenticSeek server script')
-parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama] or [llamacpp]', required=True)
+parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama], [vllm] or [llamacpp]', required=True)
 parser.add_argument('--port', type=int, help='port to use', required=True)
 args = parser.parse_args()
 
 app = Flask(__name__)
 
-assert args.provider in ["ollama", "llamacpp"], f"Provider {args.provider} does not exists. see --help for more information"
+assert args.provider in ["ollama", "llamacpp", "vllm"], f"Provider {args.provider} does not exists. see --help for more information"
 
 handler_map = {
     "ollama": OllamaLLM(),
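The truncated hunk stops at the first handler_map entry; the assert above implies the map registers all three providers. A minimal sketch of that registry under that assumption (the LlamacppLLM class name and the dispatch line are illustrative guesses, not shown in this commit):

# Sketch of the provider registry this hunk feeds into (assumed shape).
# Only the "ollama" entry is visible in the truncated hunk; the
# "llamacpp" and "vllm" entries, and the LlamacppLLM name, are
# hypothetical illustrations.
handler_map = {
    "ollama": OllamaLLM(),
    "llamacpp": LlamacppLLM(),  # hypothetical class name
    "vllm": Vllm(),
}
generator = handler_map[args.provider]  # illustrative dispatch

Note that every handler in such a map is constructed at startup regardless of which provider was requested, which is why the Vllm constructor in the next file must stay cheap.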
sources/vllm_handler.py:

@@ -10,7 +10,7 @@ class Vllm(GeneratorLLM):
         """
         super().__init__()
         self.logger = logging.getLogger(__name__)
-        self.llm = LLM(model=self.model)
+        self.llm = None
 
     def convert_history_to_prompt(self, history: List[Dict[str, str]]) -> str:
         """
@@ -37,6 +37,8 @@ class Vllm(GeneratorLLM):
             history: List of dictionaries in OpenAI format [{"role": "user", "content": "..."}, ...]
         """
         self.logger.info(f"Using {self.model} for generation with vLLM")
+        if self.llm is None:
+            self.llm = LLM(model=self.model)
 
         try:
             with self.state.lock:
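Taken together, the change moves construction of the vLLM engine out of __init__ (where self.model is presumably still None, triggering the NoneType error the commit title names) and into the first generate call, by which time the model name has been set. A minimal self-contained sketch of this lazy-initialization pattern, with GeneratorLLM stubbed out since its real definition is not part of this diff:

# Lazy-initialization sketch; names follow the diff, but GeneratorLLM is
# stubbed here and the vllm import is deferred so the module loads even
# where vllm is not installed.
import logging
from typing import Dict, List

class GeneratorLLM:
    """Stub standing in for the project's real base class (assumption)."""
    def __init__(self):
        self.model = None  # assumed to be filled in later by the server

class Vllm(GeneratorLLM):
    def __init__(self):
        super().__init__()
        self.logger = logging.getLogger(__name__)
        # Deferred: building vllm.LLM here would read self.model while it
        # is still None, which is the NoneType failure being fixed.
        self.llm = None

    def generate(self, history: List[Dict[str, str]]) -> None:
        self.logger.info(f"Using {self.model} for generation with vLLM")
        if self.llm is None:
            from vllm import LLM  # first use: self.model is set by now
            self.llm = LLM(model=self.model)
        # ... prompt conversion and generation proceed as in the handler.

A side benefit of the deferral: instantiating Vllm in handler_map no longer loads model weights when another provider is selected.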