mirror of https://github.com/tcsenpai/agenticSeek.git
synced 2025-06-03 01:30:11 +00:00
fix deployment script
This commit is contained in:
parent 3d1f42351f
commit 36b26b43c9
@@ -27,10 +27,13 @@ RUN pip install --no-cache-dir -r requirements.txt
 COPY api.py .
 COPY sources/ ./sources/
 COPY prompts/ ./prompts/
+COPY crx/ crx/
+COPY llm_router/ llm_router/
+COPY .env .
 COPY config.ini .
 
 # Expose port
 EXPOSE 8000
 
 # Run the application
-CMD ["uvicorn", "api:api", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["python3", "api.py"]
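The CMD change swaps the explicit uvicorn invocation for running api.py directly, so the host/port binding now lives in the script's __main__ block instead of the image definition. Below is a minimal sketch of the entry point this relies on, mirroring the api.py changes further down in this commit; the real file defines many more routes.

# Minimal sketch of the entry point that CMD ["python3", "api.py"] expects.
# The real api.py defines many more routes; only the pattern matters here.
import uvicorn
from fastapi import FastAPI

api = FastAPI(title="AgenticSeek API", version="0.1.0")

@api.get("/health")
async def health_check():
    return {"status": "healthy", "version": "0.1.0"}

if __name__ == "__main__":
    # Running the file directly now starts the same server that
    # `uvicorn api:api --host 0.0.0.0 --port 8000` used to start.
    uvicorn.run(api, host="0.0.0.0", port=8000)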
@@ -439,11 +439,14 @@ https://googlechromelabs.github.io/chrome-for-testing/
 
 ## FAQ
 
-**Q: What hardware do I need?**
+**Q: What kind of hardware do I need?**
 
-7B model: a GPU with 8GB of VRAM.
-14B model: a 12GB GPU (e.g., RTX 3060).
-32B model: 24GB+ of VRAM.
+| Model Size | GPU | Notes |
+|-----------|--------|-----------------------------------------------------------|
+| 7B | 8GB VRAM | ⚠️ Not recommended. Poor performance, prone to hallucinations, and the planner agent may fail. |
+| 14B | 12GB VRAM (e.g. RTX 3060) | ✅ Usable for simple tasks. May be weaker at web browsing and planning tasks. |
+| 32B | 24GB+ VRAM (e.g. RTX 4090) | 🚀 Can complete most tasks, but may still struggle with task planning. |
+| 70B+ | 48GB+ VRAM (e.g. Mac Studio) | 💪 Excellent performance. Recommended for advanced use cases. |
 
 **Q: Why choose Deepseek R1 over other models?**
 
README_FR.md
@@ -383,13 +383,14 @@ And download the chromedriver version matching your operating system
 If this section is incomplete, please open a new issue on github.
 
 ## FAQ
 
-**Q: Do I need a big PC?**
+**Q: What hardware is needed?**
 
-It depends on the model!
-For a 7B model: a GPU with 8 GB of VRAM.
-For a 14B model: a 12 GB GPU (e.g., RTX 3060).
-And for a 32B model: 24+ GB of VRAM.
+| Model Size | GPU | Comment |
+|--------------------|------|----------------------------------------------------------|
+| 7B | 8 GB VRAM | ⚠️ Not recommended. Poor performance, frequent hallucinations, and the planner agent will likely fail. |
+| 14B | 12 GB VRAM (e.g. RTX 3060) | ✅ Usable for simple tasks. May struggle with web browsing and planning tasks. |
+| 32B | 24+ GB VRAM (e.g. RTX 4090) | 🚀 Succeeds at most tasks, though it may still struggle with task planning. |
+| 70B+ | 48+ GB VRAM (e.g. Mac Studio) | 💪 Excellent. Recommended for advanced use cases. |
 
 **Q: Why deepseek and not another model?**
 
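The hardware tables above map model size to the minimum VRAM of the serving GPU. As an illustration only, not code from this repository, a small helper along these lines could pick a tier from the detected GPU memory; it assumes PyTorch with CUDA support is installed.

# Illustrative helper, not part of the project: map detected GPU memory to the
# model tiers in the table above. Assumes PyTorch is installed with CUDA support.
import torch

# Thresholds mirror the table: 48 GB -> 70B+, 24 GB -> 32B, 12 GB -> 14B, 8 GB -> 7B.
TIERS = [(48, "70B+"), (24, "32B"), (12, "14B"), (8, "7B")]

def recommended_model_size() -> str:
    if not torch.cuda.is_available():
        return "no CUDA GPU detected"
    vram_gb = torch.cuda.get_device_properties(0).total_memory / 1024**3
    for min_gb, size in TIERS:
        if vram_gb >= min_gb:
            return size
    return "under 8 GB VRAM: local models are not recommended"

if __name__ == "__main__":
    print(recommended_model_size())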
api.py
@@ -24,14 +24,14 @@ from concurrent.futures import ThreadPoolExecutor
 
 from celery import Celery
 
-app = FastAPI(title="AgenticSeek API", version="0.1.0")
+api = FastAPI(title="AgenticSeek API", version="0.1.0")
 celery_app = Celery("tasks", broker="redis://localhost:6379/0", backend="redis://localhost:6379/0")
 celery_app.conf.update(task_track_started=True)
 logger = Logger("backend.log")
 config = configparser.ConfigParser()
 config.read('config.ini')
 
-app.add_middleware(
+api.add_middleware(
     CORSMiddleware,
     allow_origins=["*"],
     allow_credentials=True,
@@ -41,7 +41,7 @@ app.add_middleware(
 
 if not os.path.exists(".screenshots"):
     os.makedirs(".screenshots")
-app.mount("/screenshots", StaticFiles(directory=".screenshots"), name="screenshots")
+api.mount("/screenshots", StaticFiles(directory=".screenshots"), name="screenshots")
 
 executor = ThreadPoolExecutor(max_workers=1)
 
@@ -106,7 +106,7 @@ def initialize_system():
 interaction = initialize_system()
 is_generating = False
 
-@app.get("/screenshot")
+@api.get("/screenshot")
 async def get_screenshot():
     logger.info("Screenshot endpoint called")
     screenshot_path = ".screenshots/updated_screen.png"
@@ -118,12 +118,12 @@ async def get_screenshot():
             content={"error": "No screenshot available"}
         )
 
-@app.get("/health")
+@api.get("/health")
 async def health_check():
     logger.info("Health check endpoint called")
     return {"status": "healthy", "version": "0.1.0"}
 
-@app.get("/is_active")
+@api.get("/is_active")
 async def is_active():
     logger.info("Is active endpoint called")
     return {"is_active": interaction.is_active}
@@ -146,7 +146,7 @@ def think_wrapper(interaction, query, tts_enabled):
         interaction.last_success = False
         raise e
 
-@app.post("/query", response_model=QueryResponse)
+@api.post("/query", response_model=QueryResponse)
 async def process_query(request: QueryRequest):
     global is_generating
     logger.info(f"Processing query: {request.query}")
@@ -199,4 +199,4 @@ async def process_query(request: QueryRequest):
     interaction.save_session()
 
 if __name__ == "__main__":
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    uvicorn.run(api, host="0.0.0.0", port=8000)
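Because the route decorators, middleware, static mount, and uvicorn.run all hang off the renamed module-level object, every reference has to move from app to api together, or either the decorators or the `uvicorn api:api` import path would break. Below is a hedged smoke test for the rename, assuming api.py can be imported in an environment where its module-level setup (config.ini, initialize_system(), etc.) succeeds and that httpx, which TestClient needs, is installed.

# Hedged smoke test for the app -> api rename. Importing api.py runs its
# module-level initialization, so this only works where those dependencies
# are available.
from fastapi.testclient import TestClient

from api import api  # the FastAPI instance is now named `api`, not `app`

client = TestClient(api)

def test_health():
    response = client.get("/health")
    assert response.status_code == 200
    assert response.json() == {"status": "healthy", "version": "0.1.0"}

def test_is_active_returns_flag():
    response = client.get("/is_active")
    assert response.status_code == 200
    assert "is_active" in response.json()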
@@ -61,13 +61,54 @@ services:
       - ./screenshots:/app/screenshots
     environment:
       - NODE_ENV=development
-      - CHOKIDAR_USEPOLLING=true # Ensure file watching works in Docker
-      - BACKEND_URL=http://host.docker.internal:8000
+      - CHOKIDAR_USEPOLLING=true
+      - BACKEND_URL=http://backend:8000
+    networks:
+      - agentic-seek-net
+
+  backend:
+    container_name: backend
+    build:
+      context: ./
+      dockerfile: Dockerfile.backend
+    stdin_open: true
+    tty: true
+    shm_size: 6g
+    ports:
+      - "8000:8000"
+    volumes:
+      - ./:/app
+    #network_mode: host
+    environment:
+      - NODE_ENV=development
+      - REDIS_URL=redis://redis:6379/0
+      - SEARXNG_URL=http://searxng:8080
+      - OLLAMA_URL=http://localhost:11434
+      - LM_STUDIO_URL=http://localhost:1234
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    depends_on:
+      - redis
+      - searxng
+      - selenium
+    networks:
+      - agentic-seek-net
+
+  selenium:
+    image: selenium/standalone-chrome
+    platform: linux/amd64
+    shm_size: 2g
+    volumes:
+      - /dev/shm:/dev/shm
+      - chrome_profiles:/tmp/chrome
+    ports:
+      - 4444:4444
     networks:
       - agentic-seek-net
 
 volumes:
   redis-data:
+  chrome_profiles:
 
 networks:
   agentic-seek-net:
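The new backend service publishes port 8000, joins agentic-seek-net, and injects service URLs (REDIS_URL, SEARXNG_URL, OLLAMA_URL, LM_STUDIO_URL) as environment variables, while api.py above still hard-codes the Redis address. The sketch below, not taken from the repository, shows one way those variables could be consumed, with localhost fallbacks for running outside Docker.

# Sketch only: consume the environment variables injected by the new backend
# service. Variable names come from the compose file above; the localhost
# fallback values are assumptions for running outside Docker.
import os

from celery import Celery

REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
SEARXNG_URL = os.environ.get("SEARXNG_URL", "http://localhost:8080")
OLLAMA_URL = os.environ.get("OLLAMA_URL", "http://localhost:11434")
LM_STUDIO_URL = os.environ.get("LM_STUDIO_URL", "http://localhost:1234")

# Example: point Celery at whichever Redis the environment provides instead
# of the hard-coded redis://localhost:6379/0 seen in api.py above.
celery_app = Celery("tasks", broker=REDIS_URL, backend=REDIS_URL)
celery_app.conf.update(task_track_started=True)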
@@ -7,7 +7,7 @@ COPY agentic-seek-front/package.json agentic-seek-front/package-lock.json ./
 RUN npm install
 
 # Copy application code
-COPY .agentic-seek-front/ .
+COPY agentic-seek-front/ .
 
 # Expose port
 EXPOSE 3000
@@ -19,7 +19,7 @@ function App() {
 
   const checkHealth = async () => {
     try {
-      await axios.get('http://0.0.0.0:8000/health');
+      await axios.get('http://backend:8000/health');
       setIsOnline(true);
       console.log('System is online');
     } catch {
@@ -39,7 +39,7 @@ function App() {
   const fetchScreenshot = async () => {
     try {
       const timestamp = new Date().getTime();
-      const res = await axios.get(`http://0.0.0.0:8000/screenshots/updated_screen.png?timestamp=${timestamp}`, {
+      const res = await axios.get(`http://backend:8000/screenshots/updated_screen.png?timestamp=${timestamp}`, {
         responseType: 'blob'
       });
       if (isMounted) {
@@ -94,7 +94,7 @@ function App() {
 
     try {
       console.log('Sending query:', query);
-      const res = await axios.post('http://0.0.0.0:8000/query', {
+      const res = await axios.post('http://backend:8000/query', {
         query,
         tts_enabled: false
       });
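The frontend now targets http://backend:8000, a hostname that only resolves for containers attached to agentic-seek-net; from the host machine, the published port makes http://localhost:8000 the equivalent address. A small stdlib check against the /health endpoint, hedged on the backend container actually being up:

# Hedged connectivity check against the /health endpoint defined in api.py.
# "backend" resolves only for containers on agentic-seek-net; from the host,
# use the published port at http://localhost:8000 instead.
import json
import urllib.request

def check(base_url: str) -> None:
    try:
        with urllib.request.urlopen(f"{base_url}/health", timeout=5) as resp:
            print(base_url, "->", json.load(resp))
    except OSError as exc:
        print(base_url, "-> unreachable:", exc)

if __name__ == "__main__":
    check("http://localhost:8000")  # from the host, via the published port
    check("http://backend:8000")    # from a container on the compose network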
@@ -95,7 +95,7 @@ server:
   # If your instance owns a /etc/searxng/settings.yml file, then set the following
   # values there.
 
-  secret_key: "a0f32049a2301f36ef012b9b7ef62733b4af1379a1d28d5c33d92cea37288182" # Is overwritten by ${SEARXNG_SECRET}
+  secret_key: "1234bbd68b6c51ff0b9ccb77e39b1f20a3178b63e71496cbc89ef019f0341382" # Is overwritten by ${SEARXNG_SECRET}
   # Proxy image results through SearXNG. Is overwritten by ${SEARXNG_IMAGE_PROXY}
   image_proxy: false
   # 1.0 and 1.1 are supported
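The committed secret_key is a 64-character hex string and, as the inline comment notes, is overwritten by ${SEARXNG_SECRET}. One way to generate a fresh value of the same shape (a sketch, not a project script):

# Produce a value suitable for SEARXNG_SECRET / secret_key: 32 random bytes
# rendered as 64 hex characters, matching the shape of the key above.
import secrets

print(secrets.token_hex(32))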
@@ -2642,6 +2642,12 @@ engines:
     shortcut: pgo
     disabled: true
 
+  - name: senscritique
+    engine: senscritique
+    shortcut: scr
+    timeout: 4.0
+    disabled: true
+
   # Doku engine lets you access to any Doku wiki instance:
   # A public one or a privete/corporate one.
   # - name: ubuntuwiki
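The added entry registers the SensCritique engine under the shortcut scr with a 4-second timeout and leaves it disabled by default. Once enabled, it can be targeted explicitly through SearXNG's search endpoint; the sketch below assumes the instance address used elsewhere in this commit (http://searxng:8080 on the Docker network) and that "json" is allowed under search.formats in settings.yml.

# Hedged example: query the senscritique engine once it is enabled.
import json
import urllib.parse
import urllib.request

SEARXNG_URL = "http://searxng:8080"

params = urllib.parse.urlencode({
    "q": "dune",
    "engines": "senscritique",
    "format": "json",
})

with urllib.request.urlopen(f"{SEARXNG_URL}/search?{params}", timeout=10) as resp:
    data = json.load(resp)

for result in data.get("results", [])[:5]:
    print(result.get("title"), "-", result.get("url"))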