8000 No public description · DyProgrammerDy/adk-python@6742ab9 · GitHub
[go: up one dir, main page]

Skip to content

Commit 6742ab9

Browse files
Jacksunwei authored and copybara-github committed
No public description
PiperOrigin-RevId: 749202950
1 parent daed456 commit 6742ab9

File tree

3 files changed

+11
-3
lines changed

3 files changed

+11
-3
lines changed

src/google/adk/agents/callback_context.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
if TYPE_CHECKING:
2424
from google.genai import types
2525

26-
from ..events.event import Event
2726
from ..events.event_actions import EventActions
2827
from ..sessions.state import State
2928
from .invocation_context import InvocationContext

src/google/adk/flows/llm_flows/functions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -310,7 +310,7 @@ async def _process_function_live_helper(
310310
function_response = {
311311
'status': f'No active streaming function named {function_name} found'
312312
}
313-
elif hasattr(tool, "func") and inspect.isasyncgenfunction(tool.func):
313+
elif inspect.isasyncgenfunction(tool.func):
314314
print('is async')
315315

316316
# for streaming tool use case

src/google/adk/models/llm_response.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414

1515
from __future__ import annotations
1616

17-
from typing import Optional
17+
from typing import Any, Optional
1818

1919
from google.genai import types
2020
from pydantic import BaseModel
@@ -37,6 +37,7 @@ class LlmResponse(BaseModel):
3737
error_message: Error message if the response is an error.
3838
interrupted: Flag indicating that LLM was interrupted when generating the
3939
content. Usually it's due to user interruption during a bidi streaming.
40+
custom_metadata: The custom metadata of the LlmResponse.
4041
"""
4142

4243
model_config = ConfigDict(extra='forbid')
@@ -71,6 +72,14 @@ class LlmResponse(BaseModel):
7172
Usually it's due to user interruption during a bidi streaming.
7273
"""
7374

75+
custom_metadata: Optional[dict[str, Any]] = None
76+
"""The custom metadata of the LlmResponse.
77+
78+
An optional key-value pair to label an LlmResponse.
79+
80+
NOTE: the entire dict must be JSON serializable.
81+
"""
82+
7483
@staticmethod
7584
def create(
7685
generate_content_response: types.GenerateContentResponse,

0 commit comments

Comments (0)
0