Mirror of https://github.com/tcsenpai/agenticSeek.git (synced 2025-06-08 12:05:27 +00:00)
fix : server problem

commit ca50b1f2d0
parent a4cfa9c651
@@ -11,6 +11,7 @@ log = logging.getLogger('werkzeug')
 log.setLevel(logging.ERROR)
 
 parser = argparse.ArgumentParser(description='AgenticSeek server script')
+parser.add_argument('--provider', type=str, help='LLM backend library to use. set to [ollama] or [llamacpp]', required=True)
 args = parser.parse_args()
 
 app = Flask(__name__)
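With this change the LLM provider is chosen once, on the command line, instead of per request. The server therefore has to be started with the new required flag, roughly like this (the script name server.py is an assumption; the diff does not show the file path):

    python3 server.py --provider ollama
    python3 server.py --provider llamacpp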
@@ -31,14 +32,13 @@ def start_generation():
 def setup():
     data = request.get_json()
     model = data.get('model', None)
-    provider = data.get('provider', None)
-    if provider is not None and generator is None:
-        if provider == "ollama":
-            generator = OllamaLLM()
-        elif provider == "llamacpp":
-            generator = LlamacppLLM()
-        else:
-            return jsonify({"error": "Provider not supported"}), 400
+    provider = args.provider
+    if provider == "ollama":
+        generator = OllamaLLM()
+    elif provider == "llamacpp":
+        generator = LlamacppLLM()
+    else:
+        raise ValueError(f"Provider {provider} does not exists. see --help for more information")
     if model is None:
         return jsonify({"error": "Model not provided"}), 400
     generator.set_model(model)
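Pieced together, the setup handler after this commit reads roughly as below. This is a sketch, not the file itself: the route decorator, the global generator line and the final success response are assumptions that fall outside the hunk.

    @app.route('/setup', methods=['POST'])    # assumed decorator, not shown in the hunk
    def setup():
        global generator                      # assumed; needed for the assignment to be visible elsewhere
        data = request.get_json()
        model = data.get('model', None)
        provider = args.provider              # provider now comes from the CLI flag, not the request body
        if provider == "ollama":
            generator = OllamaLLM()
        elif provider == "llamacpp":
            generator = LlamacppLLM()
        else:
            raise ValueError(f"Provider {provider} does not exists. see --help for more information")
        if model is None:
            return jsonify({"error": "Model not provided"}), 400
        generator.set_model(model)
        return jsonify({"status": "ok"}), 200  # assumed success response

Note the design trade-off: since --provider is required at startup but only validated inside setup(), an unsupported value now surfaces as a server-side ValueError (an HTTP 500 to the caller) rather than the old 400 JSON response.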
|
@@ -114,7 +114,7 @@ class Provider:
             raise Exception(f"Server is offline at {self.server_ip}")
 
         try:
-            requests.post(route_setup, json={"model": self.model, "provider": self.provider_name})
+            requests.post(route_setup, json={"model": self.model})
             requests.post(route_gen, json={"messages": history})
             is_complete = False
             while not is_complete:
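On the client side, the Provider class now sends only the model name; the provider is fixed when the server process is started. A rough sketch of the resulting request sequence follows, under stated assumptions: the base URL, the concrete paths behind route_setup and route_gen, the model name, and the completion-polling route are all illustrative placeholders; only the two post calls and the while loop appear in the diff.

    import time
    import requests

    base = "http://127.0.0.1:5000"                  # assumed server address
    route_setup = f"{base}/setup"                   # assumed path behind route_setup
    route_gen = f"{base}/generate"                  # assumed path behind route_gen
    history = [{"role": "user", "content": "Hello"}]

    requests.post(route_setup, json={"model": "deepseek-r1:14b"})  # provider is no longer sent; model name is illustrative
    requests.post(route_gen, json={"messages": history})

    # The diff's `while not is_complete:` loop waits for the answer to finish;
    # the status route polled here is a placeholder assumption.
    is_complete = False
    while not is_complete:
        state = requests.post(f"{base}/get_updated_sentence").json()
        is_complete = bool(state.get("is_complete", False))
        time.sleep(1)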
|