# Completion model
[model.completion.http]
kind = "ollama/completion"
model_name = "qwen2.5-coder:14b-instruct-q4_K_M"
api_endpoint = "http://ollama-svc:11434"
# prompt_template = "<PRE> {prefix} <SUF>{suffix} <MID>" # Example prompt template for the CodeLlama model series.
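# Assumption: the configured qwen2.5-coder model uses different fill-in-the-middle
# tokens than CodeLlama. If an explicit template is ever needed, the Qwen2.5-Coder
# equivalent is expected to be the line below (left commented out; verify against
# the model's documentation before enabling):
# prompt_template = "<|fim_prefix|>{prefix}<|fim_suffix|>{suffix}<|fim_middle|>"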

# Chat model
[model.chat.http]
kind = "openai/chat"
model_name = "llama3.2:latest"
api_endpoint = "http://ollama-svc:11434/v1"
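# The "openai/chat" kind speaks the OpenAI-compatible API that Ollama exposes
# under /v1, which is why this endpoint carries the /v1 suffix while the
# completion and embedding endpoints below do not.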

# Embedding model
[model.embedding.http]
kind = "ollama/embedding"
model_name = "ordis/jina-embeddings-v2-base-code:latest"
api_endpoint = "http://ollama-svc:11434"
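# Assumption: the "ollama/embedding" kind calls Ollama's native embeddings API,
# so this endpoint stays at the service root without the /v1 suffix.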