feat: add Ollama endpoint configuration

- Added `OLLAMA_ENDPOINT` environment variable to `.env.example`
- Updated `get_llm_model` function in `src/utils/utils.py` to fall back to the new `OLLAMA_ENDPOINT` environment variable when no `base_url` is provided
This commit is contained in:
wraps
2025-01-26 15:39:35 +01:00
parent af3a84ff8b
commit be01aaf336
2 changed files with 8 additions and 1 deletion

View File

@@ -11,6 +11,8 @@ AZURE_OPENAI_API_KEY=
DEEPSEEK_ENDPOINT=https://api.deepseek.com
DEEPSEEK_API_KEY=
OLLAMA_ENDPOINT=http://localhost:11434
# Set to false to disable anonymized telemetry
ANONYMIZED_TELEMETRY=true

View File

@@ -89,11 +89,16 @@ def get_llm_model(provider: str, **kwargs):
google_api_key=api_key,
)
elif provider == "ollama":
if not kwargs.get("base_url", ""):
base_url = os.getenv("OLLAMA_ENDPOINT", "http://localhost:11434")
else:
base_url = kwargs.get("base_url")
return ChatOllama(
model=kwargs.get("model_name", "qwen2.5:7b"),
temperature=kwargs.get("temperature", 0.0),
num_ctx=kwargs.get("num_ctx", 32000),
base_url=kwargs.get("base_url", "http://localhost:11434"),
base_url=base_url,
)
elif provider == "azure_openai":
if not kwargs.get("base_url", ""):