
Commit 5e157a3

Provider param fix in crewai llm (#283)
* Provider param fix in crewai llm
* chore: Auto-fix pre-commit issues
* Update provider_service.py
* chore: Auto-fix pre-commit issues

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
1 parent 8856abe commit 5e157a3

File tree

1 file changed: +3, -4 lines

app/modules/intelligence/provider/provider_service.py

@@ -305,7 +305,7 @@ async def call_llm(
         """
         provider = self._get_provider_config(size)
         params = self._build_llm_params(provider, size)
-        routing_provider = params["routing_provider"]
+        routing_provider = params.pop("routing_provider", None)
         extra_params = {}
         if self.portkey_api_key and routing_provider != "ollama":
             # ollama + portkey is not supported currently
@@ -358,7 +358,7 @@ async def call_llm_with_structured_output(
         """
         provider = self._get_provider_config(size)
         params = self._build_llm_params(provider, size)
-        routing_provider = params["routing_provider"]
+        routing_provider = params.pop("routing_provider", None)

         extra_params = {}
         if self.portkey_api_key and routing_provider != "ollama":
@@ -406,8 +406,7 @@ def _initialize_llm(self, provider: str, size: str, agent_type: AgentProvider):
         Kept for potential future differentiated initialization.
         """
         params = self._build_llm_params(provider, size)
-        routing_provider = params["routing_provider"]
-
+        routing_provider = params.pop("routing_provider", None)
         if agent_type == AgentProvider.CREWAI:
             crewai_params = {"model": params["model"], **params}
             if "default_headers" in params:

0 commit comments
