diff --git a/namespaces/ai/tabby/config.toml b/namespaces/ai/tabby/config.toml
index 6aa4585..cb8950c 100644
--- a/namespaces/ai/tabby/config.toml
+++ b/namespaces/ai/tabby/config.toml
@@ -1,13 +1,13 @@
 [model.completion.http]
 kind = "ollama/completion"
-model_name = "deepseek-r1:8b"
+model_name = "qwen2.5-coder:14b-instruct-q4_K_M"
 api_endpoint = "http://ollama-svc:11434"
 # prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>"  # Example prompt template for the CodeLlama model series.
 
 # Chat model
 [model.chat.http]
 kind = "openai/chat"
-model_name = "deepseek-r1:8b"
+model_name = "llama3.2:latest"
 api_endpoint = "http://ollama-svc:11434/v1"
 
 # Embedding model
diff --git a/namespaces/apps/mealie/kustomization.yaml b/namespaces/apps/mealie/kustomization.yaml
index 8487aa2..bbac610 100644
--- a/namespaces/apps/mealie/kustomization.yaml
+++ b/namespaces/apps/mealie/kustomization.yaml
@@ -92,7 +92,7 @@ configMapGenerator:
       - POSTGRES_PORT=5432
       - POSTGRES_DB=mealie
       # Use Ollama
-      - OPENAI_BASE_URL=http://ollama.ai.svc.cluster.local:11434
+      - OPENAI_BASE_URL=https://ollama.leechpepin.com:11434/v1
       - OPENAI_MODEL=llama3.2:latest
       - OPENAI_API_KEY=gibberish
       # Use Authentik OIDC