Switch tabby to use new service endpoint
parent 82ab5dd0a1
commit 6c36f637a5
2 changed files with 3 additions and 20 deletions
@@ -1,17 +0,0 @@
-[model.completion.http]
-kind = "ollama/completion"
-model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434"
-# prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>" # Example prompt template for the CodeLlama model series.
-
-# Chat model
-[model.chat.http]
-kind = "openai/chat"
-model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434/v1"
-
-# Embedding model
-[model.embedding.http]
-kind = "ollama/embedding"
-model_name = "ordis/jina-embeddings-v2-base-code"
-api_endpoint = "http://ollama:11434"

@@ -1,17 +1,17 @@
 [model.completion.http]
 kind = "ollama/completion"
 model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434"
+api_endpoint = "http://ollama-svc:11434"
 # prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>" # Example prompt template for the CodeLlama model series.
 
 # Chat model
 [model.chat.http]
 kind = "openai/chat"
 model_name = "deepseek-r1:8b"
-api_endpoint = "http://ollama:11434/v1"
+api_endpoint = "http://ollama-svc:11434/v1"
 
 # Embedding model
 [model.embedding.http]
 kind = "ollama/embedding"
 model_name = "ordis/jina-embeddings-v2-base-code"
-api_endpoint = "http://ollama:11434"
+api_endpoint = "http://ollama-svc:11434"
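
The only functional change is the Ollama host name: Tabby's completion, chat, and embedding backends now point at ollama-svc instead of ollama. As a quick sanity check before restarting Tabby, a sketch like the one below could confirm the new endpoint is reachable; it assumes the ollama-svc name resolves from wherever Tabby runs and that Ollama's standard /api/tags and /v1/models routes are exposed (neither of which is shown in this commit).

# Hypothetical reachability check for the new Ollama service endpoint.
# Assumes "ollama-svc" resolves from the host/pod where Tabby runs and that
# Ollama's default routes are enabled; adjust BASE if your setup differs.
import json
import urllib.request

BASE = "http://ollama-svc:11434"

def fetch(path: str) -> dict:
    """GET a JSON document from the Ollama service and parse it."""
    with urllib.request.urlopen(f"{BASE}{path}", timeout=5) as resp:
        return json.load(resp)

if __name__ == "__main__":
    # Native Ollama API: list locally available models (what the
    # completion and embedding blocks in the Tabby config talk to).
    tags = fetch("/api/tags")
    print("ollama models:", [m["name"] for m in tags.get("models", [])])

    # OpenAI-compatible API: what the [model.chat.http] block talks to.
    models = fetch("/v1/models")
    print("openai-compatible ids:", [m["id"] for m in models.get("data", [])])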