Feature: Add LM Studio provider integration

- Added LM Studio provider to support OpenAI-compatible API
- Implemented chat and embeddings model loading
- Updated config to include LM Studio API endpoint
parent 41b258e4d8
commit aa240009ab
4 changed files with 113 additions and 1 deletion
@@ -25,5 +25,8 @@ API_URL = "" # Ollama API URL - http://host.docker.internal:11434
 [MODELS.DEEPSEEK]
 API_KEY = ""
 
+[MODELS.LM_STUDIO]
+API_URL = "" # LM Studio API URL - http://host.docker.internal:1234
+
 [API_ENDPOINTS]
 SEARXNG = "" # SearxNG API URL - http://localhost:32768
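The hunk above only wires the endpoint into the config; the provider itself loads chat and embeddings models by talking to LM Studio's OpenAI-compatible API served at that URL. Below is a minimal sketch of what that loading could look like, assuming plain fetch calls against the standard /v1/models, /v1/chat/completions, and /v1/embeddings routes; the function and type names are illustrative and are not the ones added in this commit.

// Hypothetical sketch of an LM Studio provider using its OpenAI-compatible API.
// Requires Node 18+ for the global fetch. Names here are illustrative only.

const LM_STUDIO_API_URL = "http://host.docker.internal:1234"; // value of [MODELS.LM_STUDIO] API_URL

interface LMStudioModel {
  id: string;
}

// List the models currently available in LM Studio (GET /v1/models).
async function loadLMStudioModels(): Promise<LMStudioModel[]> {
  const res = await fetch(`${LM_STUDIO_API_URL}/v1/models`);
  if (!res.ok) throw new Error(`LM Studio /v1/models failed: ${res.status}`);
  const body = (await res.json()) as { data: LMStudioModel[] };
  return body.data;
}

// Run one chat completion against a loaded chat model (POST /v1/chat/completions).
async function chat(model: string, prompt: string): Promise<string> {
  const res = await fetch(`${LM_STUDIO_API_URL}/v1/chat/completions`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model,
      messages: [{ role: "user", content: prompt }],
    }),
  });
  if (!res.ok) throw new Error(`LM Studio chat failed: ${res.status}`);
  const body = (await res.json()) as {
    choices: { message: { content: string } }[];
  };
  return body.choices[0].message.content;
}

// Embed a piece of text with an embeddings model (POST /v1/embeddings).
async function embed(model: string, input: string): Promise<number[]> {
  const res = await fetch(`${LM_STUDIO_API_URL}/v1/embeddings`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ model, input }),
  });
  if (!res.ok) throw new Error(`LM Studio embeddings failed: ${res.status}`);
  const body = (await res.json()) as { data: { embedding: number[] }[] };
  return body.data[0].embedding;
}

// Example: enumerate whatever models the local LM Studio server exposes.
loadLMStudioModels()
  .then((models) => console.log("Loaded models:", models.map((m) => m.id)))
  .catch(console.error);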