Skip to content
Open
2 changes: 1 addition & 1 deletion src/google/adk/models/lite_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2168,7 +2168,7 @@ class LiteLlm(BaseLlm):
llm_client: The LLM client to use for the model.
"""

llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient)
llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient, exclude=True)
"""The LLM client to use for the model."""

_additional_args: Dict[str, Any] = None
Expand Down
7 changes: 7 additions & 0 deletions tests/unittests/models/test_litellm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4954,3 +4954,10 @@ async def test_content_to_message_param_anthropic_no_signature_falls_back():
# Falls back to reasoning_content when no signatures present
assert result.get("reasoning_content") == "thinking without sig"
assert "thinking_blocks" not in result


def test_lite_llm_model_dump_excludes_llm_client():
  """Verify serialization omits llm_client, which is not JSON serializable."""
  lite_llm_instance = LiteLlm(model="openrouter/openai/gpt-4o-mini")
  serialized = lite_llm_instance.model_dump(mode="python", exclude_none=True)
  # The client object must be excluded from the dump (Field(exclude=True)).
  assert "llm_client" not in serialized