From 6c36f637a50ad7f34a7f89ee63b44925e1cf6d06 Mon Sep 17 00:00:00 2001
From: Jonathan Leech-Pepin
Date: Fri, 31 Jan 2025 17:05:27 -0500
Subject: [PATCH] Switch tabby to use new service endpoint

---
 namespaces/ai/ollama/config.toml | 17 -----------------
 namespaces/ai/tabby/config.toml  |  6 +++---
 2 files changed, 3 insertions(+), 20 deletions(-)
 delete mode 100644 namespaces/ai/ollama/config.toml

diff --git a/namespaces/ai/ollama/config.toml b/namespaces/ai/ollama/config.toml
deleted file mode 100644
index f640ec1..0000000
--- a/namespaces/ai/ollama/config.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-[model.completion.http]
-kind = "ollama/completion"
-model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434"
-# prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>"  # Example prompt template for the CodeLlama model series.
-
-# Chat model
-[model.chat.http]
-kind = "openai/chat"
-model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434/v1"
-
-# Embedding model
-[model.embedding.http]
-kind = "ollama/embedding"
-model_name = "ordis/jina-embeddings-v2-base-code"
-api_endpoint = "http://ollama:11434"
diff --git a/namespaces/ai/tabby/config.toml b/namespaces/ai/tabby/config.toml
index f640ec1..6aa4585 100644
--- a/namespaces/ai/tabby/config.toml
+++ b/namespaces/ai/tabby/config.toml
@@ -1,17 +1,17 @@
 [model.completion.http]
 kind = "ollama/completion"
 model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434"
+api_endpoint = "http://ollama-svc:11434"
 # prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>"  # Example prompt template for the CodeLlama model series.
 
 # Chat model
 [model.chat.http]
 kind = "openai/chat"
 model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434/v1"
+api_endpoint = "http://ollama-svc:11434/v1"
 
 # Embedding model
 [model.embedding.http]
 kind = "ollama/embedding"
 model_name = "ordis/jina-embeddings-v2-base-code"
-api_endpoint = "http://ollama:11434"
+api_endpoint = "http://ollama-svc:11434"
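
Note: the switch from ollama to ollama-svc suggests the Ollama pods are now reached through a dedicated Kubernetes Service in the ai namespace (consistent with the namespaces/ai/ repo layout). The patch itself does not include that Service, so the following is only a minimal sketch of what it might look like; the name ollama-svc and port 11434 come from the config above, while the selector label and port name are illustrative assumptions.

apiVersion: v1
kind: Service
metadata:
  name: ollama-svc          # DNS name the Tabby config now targets (ollama-svc:11434)
  namespace: ai             # assumed from the repo path namespaces/ai/
spec:
  selector:
    app: ollama             # assumed pod label on the Ollama workload
  ports:
    - name: api             # illustrative port name
      port: 11434           # Ollama's default API port, matching every api_endpoint above
      targetPort: 11434

With a Service like this in place, in-cluster clients in the ai namespace resolve ollama-svc directly, which is why the tabby config only needed its three api_endpoint hostnames changed.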