fix: ollama provider not respecting OLLAMA_ENDPOINT env var
@@ -106,13 +106,13 @@ def get_llm_model(provider: str, **kwargs):
             base_url = os.getenv("OLLAMA_ENDPOINT", "http://localhost:11434")
         else:
             base_url = kwargs.get("base_url")
 
         if "deepseek-r1" in kwargs.get("model_name", "qwen2.5:7b"):
             return DeepSeekR1ChatOllama(
                 model=kwargs.get("model_name", "deepseek-r1:14b"),
                 temperature=kwargs.get("temperature", 0.0),
                 num_ctx=kwargs.get("num_ctx", 32000),
-                base_url=kwargs.get("base_url", base_url),
+                base_url=base_url,
             )
         else:
             return ChatOllama(
@@ -120,7 +120,7 @@ def get_llm_model(provider: str, **kwargs):
                 temperature=kwargs.get("temperature", 0.0),
                 num_ctx=kwargs.get("num_ctx", 32000),
                 num_predict=kwargs.get("num_predict", 1024),
-                base_url=kwargs.get("base_url", base_url),
+                base_url=base_url,
             )
     elif provider == "azure_openai":
         if not kwargs.get("base_url", ""):
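For context, the resolution order this fix relies on can be sketched as a small standalone helper. This is only an illustration of the logic visible in the hunks above; resolve_ollama_base_url is a hypothetical name, not a function in the repository.

import os

def resolve_ollama_base_url(**kwargs) -> str:
    # A non-empty base_url kwarg wins; otherwise fall back to the
    # OLLAMA_ENDPOINT env var, then to the local default.
    if not kwargs.get("base_url", ""):
        return os.getenv("OLLAMA_ENDPOINT", "http://localhost:11434")
    return kwargs["base_url"]

Before the fix, the constructors received kwargs.get("base_url", base_url), so a base_url key already present in kwargs (most likely an empty string) shadowed the value resolved from OLLAMA_ENDPOINT; passing the already-resolved base_url keeps the env var effective.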