feat: always enable vlm and ocr plugin

This commit is contained in:
arkohut 2024-10-25 11:13:46 +08:00
parent 559f7e9732
commit a3cacb02e9
3 changed files with 6 additions and 12 deletions

View File

@@ -16,7 +16,6 @@ import typer
class VLMSettings(BaseModel):
enabled: bool = True
modelname: str = "minicpm-v"
endpoint: str = "http://localhost:11434"
token: str = ""
@@ -28,7 +27,6 @@ class VLMSettings(BaseModel):
class OCRSettings(BaseModel):
enabled: bool = True
# will be ignored if use_local is True
endpoint: str = "http://localhost:5555/predict"
token: str = ""

View File

@@ -16,7 +16,6 @@ default_plugins:
# using ollama as the vlm server
vlm:
concurrency: 1
enabled: true
endpoint: http://localhost:11434
force_jpeg: true
modelname: minicpm-v
@@ -26,7 +25,6 @@ vlm:
# using local ocr
ocr:
concurrency: 1
enabled: true
# this is not used if use_local is true
endpoint: http://localhost:5555/predict
force_jpeg: false

View File

@@ -878,14 +878,12 @@ def run_server():
print(f"OCR plugin enabled: {settings.ocr}")
# Add VLM plugin router
if settings.vlm.enabled:
print("VLM plugin is enabled")
# Removed check for settings.vlm.enabled
vlm_main.init_plugin(settings.vlm)
app.include_router(vlm_main.router, prefix="/plugins/vlm")
# Add OCR plugin router
if settings.ocr.enabled:
print("OCR plugin is enabled")
# Removed check for settings.ocr.enabled
ocr_main.init_plugin(settings.ocr)
app.include_router(ocr_main.router, prefix="/plugins/ocr")