Fix mealie and tabby configs

JLP 2025-02-12 12:13:14 -05:00
parent 2e8ce9b1f8
commit 46e64631c6
2 changed files with 3 additions and 3 deletions


@@ -1,13 +1,13 @@
[model.completion.http]
kind = "ollama/completion"
model_name = "deepseek-r1:8b"
model_name = "qwen2.5-coder:14b-instruct-q4_K_M"
api_endpoint = "http://ollama-svc:11434"
# prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>" # Example prompt template for the CodeLlama model series.
# Chat model
[model.chat.http]
kind = "openai/chat"
model_name = "deepseek-r1:8b"
model_name = "llama3.2:latest"
api_endpoint = "http://ollama-svc:11434/v1"
# Embedding model

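The Tabby completion model now points at a code-tuned Qwen model and the chat model at llama3.2, both served by the same Ollama instance. One way to confirm that instance actually has those tags pulled is to query Ollama's /api/tags endpoint; the snippet below is a hypothetical check and not part of this commit (only the ollama-svc endpoint and model names are taken from the config above):

# Hypothetical sanity check: confirm the Ollama instance behind ollama-svc
# serves the models referenced in the Tabby config above.
import json
import urllib.request

OLLAMA = "http://ollama-svc:11434"                # api_endpoint from the config
EXPECTED = {
    "qwen2.5-coder:14b-instruct-q4_K_M",          # completion model
    "llama3.2:latest",                            # chat model
}

with urllib.request.urlopen(f"{OLLAMA}/api/tags") as resp:
    available = {m["name"] for m in json.load(resp)["models"]}

missing = EXPECTED - available
print("missing models:", ", ".join(sorted(missing)) or "none")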

@@ -92,7 +92,7 @@ configMapGenerator:
- POSTGRES_PORT=5432
- POSTGRES_DB=mealie
# Use Ollama
-- OPENAI_BASE_URL=http://ollama.ai.svc.cluster.local:11434
+- OPENAI_BASE_URL=https://ollama.leechpepin.com:11434/v1
- OPENAI_MODEL=llama3.2:latest
- OPENAI_API_KEY=gibberish
# Use Authentik OIDC
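Mealie talks to Ollama through its OpenAI-compatible API, so the new base URL includes the /v1 prefix that the old in-cluster URL lacked; the placeholder key (gibberish) presumably just satisfies Mealie's required setting, since Ollama ignores it. A hedged smoke test of that endpoint, using only values from the configMap entries above, might look like:

# Hypothetical smoke test of the OpenAI-compatible endpoint Mealie now uses.
# URL, model name, and placeholder key are the configMap values above.
import json
import urllib.request

req = urllib.request.Request(
    "https://ollama.leechpepin.com:11434/v1/chat/completions",
    data=json.dumps({
        "model": "llama3.2:latest",
        "messages": [{"role": "user", "content": "Reply with the word OK."}],
    }).encode(),
    headers={
        "Content-Type": "application/json",
        "Authorization": "Bearer gibberish",  # ignored by Ollama
    },
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp)["choices"][0]["message"]["content"])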