Create config.toml
parent 0b7989c3d3
commit 8536caa096
1 changed file with 32 additions and 0 deletions
config.toml (Normal file, +32)
@@ -0,0 +1,32 @@
[GENERAL]
SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead of using -1 use "-1m")

[MODELS.OPENAI]
API_KEY = ""

[MODELS.GROQ]
API_KEY = ""

[MODELS.ANTHROPIC]
API_KEY = ""

[MODELS.GEMINI]
API_KEY = "AIzaSyCkRpRuHKbE1nMY5CQLhoiWP5UH0zRzLqM"

[MODELS.CUSTOM_OPENAI]
API_KEY = ""
API_URL = ""
MODEL_NAME = ""

[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434

[MODELS.DEEPSEEK]
API_KEY = ""

[MODELS.LM_STUDIO]
API_URL = "" # LM Studio API URL - http://host.docker.internal:1234

[API_ENDPOINTS]
SEARXNG = "" # SearxNG API URL - http://localhost:32768