diff --git a/server/app.py b/server/app.py
index a95fbfb..85abcc8 100644
--- a/server/app.py
+++ b/server/app.py
@@ -11,6 +11,7 @@ log = logging.getLogger('werkzeug')
 log.setLevel(logging.ERROR)
 
 parser = argparse.ArgumentParser(description='AgenticSeek server script')
+parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama] or [llamacpp]', required=True)
 args = parser.parse_args()
 
 app = Flask(__name__)
@@ -31,14 +32,14 @@ def start_generation():
 def setup():
     data = request.get_json()
     model = data.get('model', None)
-    provider = data.get('provider', None)
-    if provider is not None and generator is None:
-        if provider == "ollama":
-            generator = OllamaLLM()
-        elif provider == "llamacpp":
-            generator = LlamacppLLM()
-        else:
-            return jsonify({"error": "Provider not supported
+    global generator
+    provider = args.provider
+    if provider == "ollama":
+        generator = OllamaLLM()
+    elif provider == "llamacpp":
+        generator = LlamacppLLM()
+    else:
+        return jsonify({"error": f"Provider {provider} does not exist, see --help for more information"}), 400
     if model is None:
         return jsonify({"error": "Model not provided"}), 400
     generator.set_model(model)
diff --git a/sources/llm_provider.py b/sources/llm_provider.py
index e34109c..cd1c1f9 100644
--- a/sources/llm_provider.py
+++ b/sources/llm_provider.py
@@ -114,7 +114,7 @@ class Provider:
             raise Exception(f"Server is offline at {self.server_ip}")
 
         try:
-            requests.post(route_setup, json={"model": self.model, "provider": self.provider_name})
+            requests.post(route_setup, json={"model": self.model})
             requests.post(route_gen, json={"messages": history})
             is_complete = False
             while not is_complete: