Mirror of https://github.com/tcsenpai/agenticSeek.git, synced 2025-06-06 11:05:26 +00:00
fix: server problem

This commit is contained in:
parent a4cfa9c651
commit ca50b1f2d0
@@ -11,6 +11,7 @@ log = logging.getLogger('werkzeug')
 log.setLevel(logging.ERROR)

 parser = argparse.ArgumentParser(description='AgenticSeek server script')
 parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama] or [llamacpp]', required=True)
 args = parser.parse_args()

 app = Flask(__name__)
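For orientation, a minimal sketch of how the top of the server script fits together after this change. The script file name, the launch command, and the module-level generator handle are assumptions added for illustration; only the logger, argparse, and Flask lines come from the hunk above.

import argparse
import logging

from flask import Flask, jsonify, request

# Silence werkzeug's per-request logging, as in the hunk context.
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)

# The LLM backend is now chosen once at startup, e.g. (script name assumed):
#   python server.py --provider ollama
parser = argparse.ArgumentParser(description='AgenticSeek server script')
parser.add_argument('--provider', type=str,
                    help='LLM backend library to use. set to [ollama] or [llamacpp]',
                    required=True)
args = parser.parse_args()

app = Flask(__name__)
generator = None  # assumed module-level handle for the selected backend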
@@ -31,14 +32,13 @@ def start_generation():
 def setup():
     data = request.get_json()
     model = data.get('model', None)
-    provider = data.get('provider', None)
-    if provider is not None and generator is None:
-        if provider == "ollama":
-            generator = OllamaLLM()
-        elif provider == "llamacpp":
-            generator = LlamacppLLM()
-        else:
-            return jsonify({"error": "Provider not supported"}), 400
+    provider = args.provider
+    if provider == "ollama":
+        generator = OllamaLLM()
+    elif provider == "llamacpp":
+        generator = LlamacppLLM()
+    else:
+        raise ValueError(f"Provider {provider} does not exist. See --help for more information.")
     if model is None:
         return jsonify({"error": "Model not provided"}), 400
     generator.set_model(model)
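For context, a minimal runnable sketch of how the fixed setup() handler could be wired up as a Flask route. The route path, the global generator statement, the success response, and the stub classes are assumptions added for illustration; only the lines between data = request.get_json() and generator.set_model(model) come from the hunk above.

from flask import Flask, jsonify, request

# Stand-in stubs so this sketch runs on its own; the real classes live in the repo.
class OllamaLLM:
    def set_model(self, model):
        self.model = model

class LlamacppLLM:
    def set_model(self, model):
        self.model = model

class _Args:
    provider = "ollama"   # stand-in for the parsed --provider CLI flag

args = _Args()

app = Flask(__name__)
generator = None

@app.route('/setup', methods=['POST'])        # route path is an assumption, not shown in the hunk
def setup():
    global generator                          # assumed: without it, the assignments below would stay local
    data = request.get_json()
    model = data.get('model', None)
    provider = args.provider                  # provider now comes from the CLI flag, not the request body
    if provider == "ollama":
        generator = OllamaLLM()
    elif provider == "llamacpp":
        generator = LlamacppLLM()
    else:
        raise ValueError(f"Provider {provider} does not exist. See --help for more information.")
    if model is None:
        return jsonify({"error": "Model not provided"}), 400
    generator.set_model(model)
    return jsonify({"status": "model set"}), 200   # assumed response; the hunk ends before any return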
@@ -114,7 +114,7 @@ class Provider:
             raise Exception(f"Server is offline at {self.server_ip}")

         try:
-            requests.post(route_setup, json={"model": self.model, "provider": self.provider_name})
+            requests.post(route_setup, json={"model": self.model})
             requests.post(route_gen, json={"messages": history})
             is_complete = False
             while not is_complete:
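On the client side, the change above means only the model name is posted to the setup route; the backend itself is now fixed when the server is launched. A hedged sketch of that flow follows. The server address, route paths, and model name are placeholders, not taken from the diff; only the JSON payload shapes come from the hunk.

import requests

# Placeholders for illustration; adjust to the actual server configuration.
server_address = "http://127.0.0.1:5000"
route_setup = f"{server_address}/setup"
route_gen = f"{server_address}/generate"

history = [{"role": "user", "content": "Hello"}]

# After this fix the setup request carries only the model name; the backend
# (ollama or llamacpp) is decided by --provider when the server is launched.
requests.post(route_setup, json={"model": "mistral"})
requests.post(route_gen, json={"messages": history})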