diff --git a/namespaces/ai/ollama/kustomization.yaml b/namespaces/ai/ollama/kustomization.yaml index 816f4c7..c086591 100644 --- a/namespaces/ai/ollama/kustomization.yaml +++ b/namespaces/ai/ollama/kustomization.yaml @@ -4,7 +4,7 @@ kind: Kustomization images: - name: image newName: ollama/ollama - newTag: 0.11.3 + newTag: 0.21.0 transformers: - |- diff --git a/namespaces/ai/tabby/config.toml b/namespaces/ai/tabby/config.toml index fa760aa..b6db723 100644 --- a/namespaces/ai/tabby/config.toml +++ b/namespaces/ai/tabby/config.toml @@ -1,13 +1,13 @@ [model.completion.http] kind = "ollama/completion" -model_name = "qwen2.5-coder:14b-instruct-q4_K_M" +model_name = "qwen3.5:9b" api_endpoint = "http://ollama-svc:11434" # prompt_template = "
<PRE> {prefix} <SUF>{suffix} <MID>" # Example prompt template for the CodeLlama model series.
# Chat model
[model.chat.http]
kind = "openai/chat"
-model_name = "granite3.3:8b"
+model_name = "qwen3.5:9b"
api_endpoint = "http://ollama-svc:11434/v1"
# Embedding model
diff --git a/namespaces/ai/tabby/kustomization.yaml b/namespaces/ai/tabby/kustomization.yaml
index aac54a4..b4e4d65 100644
--- a/namespaces/ai/tabby/kustomization.yaml
+++ b/namespaces/ai/tabby/kustomization.yaml
@@ -4,7 +4,7 @@ kind: Kustomization
images:
- name: image
newName: tabbyml/tabby
- newTag: 0.30.1
+ newTag: "20260330"
transformers:
- |-
apiVersion: builtin