diff --git a/src/google/adk/models/lite_llm.py b/src/google/adk/models/lite_llm.py
index 7d13696c96..2d9c63e9b5 100644
--- a/src/google/adk/models/lite_llm.py
+++ b/src/google/adk/models/lite_llm.py
@@ -2146,7 +2146,7 @@ class LiteLlm(BaseLlm):
     llm_client: The LLM client to use for the model.
   """
 
-  llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient)
+  llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient, exclude=True)
   """The LLM client to use for the model."""
 
   _additional_args: Dict[str, Any] = None
diff --git a/tests/unittests/models/test_litellm.py b/tests/unittests/models/test_litellm.py
index ace08ad997..1363dd0059 100644
--- a/tests/unittests/models/test_litellm.py
+++ b/tests/unittests/models/test_litellm.py
@@ -4894,3 +4894,10 @@ async def test_content_to_message_param_anthropic_no_signature_falls_back():
   # Falls back to reasoning_content when no signatures present
   assert result.get("reasoning_content") == "thinking without sig"
   assert "thinking_blocks" not in result
+
+
+def test_lite_llm_model_dump_excludes_llm_client():
+  """llm_client should not appear in model_dump output (not JSON serializable)."""
+  model = LiteLlm(model="openrouter/openai/gpt-4o-mini")
+  dumped = model.model_dump(mode="python", exclude_none=True)
+  assert "llm_client" not in dumped