From 7a14c49fb8f17975c34ab52e120cbd4508cae5f9 Mon Sep 17 00:00:00 2001
From: Ammar Fitwalla
Date: Fri, 17 Apr 2026 23:45:12 +0530
Subject: [PATCH] fix(lite_llm): exclude llm_client from model_dump to fix build_graph 500 error

---
 src/google/adk/models/lite_llm.py      | 2 +-
 tests/unittests/models/test_litellm.py | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/google/adk/models/lite_llm.py b/src/google/adk/models/lite_llm.py
index 7d13696c96..2d9c63e9b5 100644
--- a/src/google/adk/models/lite_llm.py
+++ b/src/google/adk/models/lite_llm.py
@@ -2146,7 +2146,7 @@ class LiteLlm(BaseLlm):
     llm_client: The LLM client to use for the model.
   """
 
-  llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient)
+  llm_client: LiteLLMClient = Field(default_factory=LiteLLMClient, exclude=True)
   """The LLM client to use for the model."""
 
   _additional_args: Dict[str, Any] = None
diff --git a/tests/unittests/models/test_litellm.py b/tests/unittests/models/test_litellm.py
index ace08ad997..1363dd0059 100644
--- a/tests/unittests/models/test_litellm.py
+++ b/tests/unittests/models/test_litellm.py
@@ -4894,3 +4894,9 @@ async def test_content_to_message_param_anthropic_no_signature_falls_back():
   # Falls back to reasoning_content when no signatures present
   assert result.get("reasoning_content") == "thinking without sig"
   assert "thinking_blocks" not in result
+
+def test_lite_llm_model_dump_excludes_llm_client():
+  """llm_client should not appear in model_dump output (not JSON serializable)."""
+  model = LiteLlm(model="openrouter/openai/gpt-4o-mini")
+  dumped = model.model_dump(mode="python", exclude_none=True)
+  assert "llm_client" not in dumped