mirror of
https://github.com/tcsenpai/agenticSeek.git
synced 2025-06-06 11:05:26 +00:00
fix: server script
This commit is contained in:
parent
8c77f3eddb
commit
8dde9f19a4
@ -212,6 +212,8 @@ If you have a powerful computer or a server that you can use, but you want to us
|
||||
|
||||
### 1️⃣ **Set up and start the server scripts**
|
||||
|
||||
You need to have Ollama installed on the server (we will integrate vLLM and llama.cpp soon).
|
||||
|
||||
On your "server" that will run the AI model, get the IP address
|
||||
|
||||
```sh
|
||||
@ -223,7 +225,7 @@ Note: For Windows or macOS, use ipconfig or ifconfig respectively to find the IP
|
||||
Clone the repository, then run the script `server_ollama.py` in `server/`
|
||||
|
||||
```sh
|
||||
python3 server_ollama.py
|
||||
python3 server_ollama.py --model "deepseek-r1:32b"
|
||||
```
|
||||
|
||||
### 2️⃣ **Run it**
|
||||
|
@ -1,30 +0,0 @@
|
||||
{
|
||||
"model_name": "deepseek-r1:14b",
|
||||
"known_models": [
|
||||
"qwq:32b",
|
||||
"deepseek-r1:1.5b",
|
||||
"deepseek-r1:7b",
|
||||
"deepseek-r1:14b",
|
||||
"deepseek-r1:32b",
|
||||
"deepseek-r1:70b",
|
||||
"deepseek-r1:671b",
|
||||
"deepseek-coder:1.3b",
|
||||
"deepseek-coder:6.7b",
|
||||
"deepseek-coder:33b",
|
||||
"llama2-uncensored:7b",
|
||||
"llama2-uncensored:70b",
|
||||
"llama3.1:8b",
|
||||
"llama3.1:70b",
|
||||
"llama3.3:70b",
|
||||
"llama3:8b",
|
||||
"llama3:70b",
|
||||
"phi4:14b",
|
||||
"mistral:7b",
|
||||
"mistral:70b",
|
||||
"mistral:33b",
|
||||
"qwen1:7b",
|
||||
"qwen1:14b",
|
||||
"qwen1:32b",
|
||||
"qwen1:70b"
|
||||
]
|
||||
}
|
"""AgenticSeek server script: exposes an Ollama-backed model over a Flask HTTP API."""
from flask import Flask, jsonify, request
import threading
import ollama
import logging
import argparse

# Silence Flask's (werkzeug) per-request access logging; only errors are shown.
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)

# The model is always chosen on the command line; there is no hard-coded default.
parser = argparse.ArgumentParser(description='AgenticSeek server script')
parser.add_argument('--model', type=str, help='Model to use. eg: deepseek-r1:14b', required=True)
args = parser.parse_args()

app = Flask(__name__)

# Name of the Ollama model to serve, e.g. "deepseek-r1:14b".
# (The old hard-coded default was removed: --model is required above,
# so a dead `model = 'deepseek-r1:14b'` assignment would never survive.)
model = args.model

# Shared state with thread-safe locks
||||
class GenerationState:
|
Loading…
x
Reference in New Issue
Block a user