Skip to content

Commit f4ea24e

Browse files
authored
Merge branch 'main' into main
2 parents b60e9e4 + 35308d3 commit f4ea24e

File tree

19 files changed

+1195
-50
lines changed

19 files changed

+1195
-50
lines changed

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ dependencies = [
3232
"click>=8.1.8, <9.0.0", # For CLI tools
3333
"fastapi>=0.115.0, <0.124.0", # FastAPI framework
3434
"google-api-python-client>=2.157.0, <3.0.0", # Google API client discovery
35+
"google-auth>=2.47.0", # Google Auth library
3536
"google-cloud-aiplatform[agent_engines]>=1.132.0, <2.0.0", # For VertexAI integrations, e.g. example store.
3637
"google-cloud-bigquery-storage>=2.0.0",
3738
"google-cloud-bigquery>=2.2.0",

src/google/adk/agents/live_request_queue.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -45,15 +45,6 @@ class LiveRequestQueue:
4545
"""Queue used to send LiveRequest in a live(bidirectional streaming) way."""
4646

4747
def __init__(self):
48-
# Ensure there's an event loop available in this thread
49-
try:
50-
asyncio.get_running_loop()
51-
except RuntimeError:
52-
# No running loop, create one
53-
loop = asyncio.new_event_loop()
54-
asyncio.set_event_loop(loop)
55-
56-
# Now create the queue (it will use the event loop we just ensured exists)
5748
self._queue = asyncio.Queue()
5849

5950
def close(self):

src/google/adk/auth/credential_manager.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -176,17 +176,13 @@ async def get_auth_credential(
176176
async def _load_existing_credential(
177177
self, callback_context: CallbackContext
178178
) -> Optional[AuthCredential]:
179-
"""Load existing credential from credential service or cached exchanged credential."""
179+
"""Load existing credential from credential service."""
180180

181181
# Try loading from credential service first
182182
credential = await self._load_from_credential_service(callback_context)
183183
if credential:
184184
return credential
185185

186-
# Check if we have a cached exchanged credential
187-
if self._auth_config.exchanged_auth_credential:
188-
return self._auth_config.exchanged_auth_credential
189-
190186
return None
191187

192188
async def _load_from_credential_service(

src/google/adk/cli/adk_web_server.py

Lines changed: 25 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1547,14 +1547,31 @@ async def event_generator():
15471547
)
15481548
) as agen:
15491549
async for event in agen:
1550-
# Format as SSE data
1551-
sse_event = event.model_dump_json(
1552-
exclude_none=True, by_alias=True
1553-
)
1554-
logger.debug(
1555-
"Generated event in agent run streaming: %s", sse_event
1556-
)
1557-
yield f"data: {sse_event}\n\n"
1550+
# ADK Web renders artifacts from `actions.artifactDelta`
1551+
# during part processing *and* during action processing, which would
# render each artifact twice. To avoid duplicates, split each such
# event into two:
1552+
# 1) the original event with `artifactDelta` cleared (content)
1553+
# 2) a content-less "action-only" event carrying `artifactDelta`
1554+
events_to_stream = [event]
1555+
if (
1556+
event.actions.artifact_delta
1557+
and event.content
1558+
and event.content.parts
1559+
):
1560+
content_event = event.model_copy(deep=True)
1561+
content_event.actions.artifact_delta = {}
1562+
artifact_event = event.model_copy(deep=True)
1563+
artifact_event.content = None
1564+
events_to_stream = [content_event, artifact_event]
1565+
1566+
for event_to_stream in events_to_stream:
1567+
sse_event = event_to_stream.model_dump_json(
1568+
exclude_none=True,
1569+
by_alias=True,
1570+
)
1571+
logger.debug(
1572+
"Generated event in agent run streaming: %s", sse_event
1573+
)
1574+
yield f"data: {sse_event}\n\n"
15581575
except Exception as e:
15591576
logger.exception("Error in event_generator: %s", e)
15601577
# You might want to yield an error event here

src/google/adk/cli/cli_tools_click.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
from . import cli_deploy
3737
from .. import version
3838
from ..evaluation.constants import MISSING_EVAL_DEPENDENCIES_MESSAGE
39+
from ..sessions.migration import migration_runner
3940
from .cli import run_cli
4041
from .fast_api import get_fast_api_app
4142
from .utils import envs
@@ -1507,6 +1508,47 @@ def cli_deploy_cloud_run(
15071508
click.secho(f"Deploy failed: {e}", fg="red", err=True)
15081509

15091510

1511+
@main.group()
1512+
def migrate():
1513+
"""ADK migration commands."""
1514+
pass
1515+
1516+
1517+
@migrate.command("session", cls=HelpfulCommand)
1518+
@click.option(
1519+
"--source_db_url",
1520+
required=True,
1521+
help=(
1522+
"SQLAlchemy URL of source database in database session service, e.g."
1523+
" sqlite:///source.db."
1524+
),
1525+
)
1526+
@click.option(
1527+
"--dest_db_url",
1528+
required=True,
1529+
help=(
1530+
"SQLAlchemy URL of destination database in database session service,"
1531+
" e.g. sqlite:///dest.db."
1532+
),
1533+
)
1534+
@click.option(
1535+
"--log_level",
1536+
type=LOG_LEVELS,
1537+
default="INFO",
1538+
help="Optional. Set the logging level",
1539+
)
1540+
def cli_migrate_session(
1541+
*, source_db_url: str, dest_db_url: str, log_level: str
1542+
):
1543+
"""Migrates a session database to the latest schema version."""
1544+
logs.setup_adk_logger(getattr(logging, log_level.upper()))
1545+
try:
1546+
migration_runner.upgrade(source_db_url, dest_db_url)
1547+
click.secho("Migration check and upgrade process finished.", fg="green")
1548+
except Exception as e:
1549+
click.secho(f"Migration failed: {e}", fg="red", err=True)
1550+
1551+
15101552
@deploy.command("agent_engine")
15111553
@click.option(
15121554
"--api_key",

src/google/adk/cli/utils/envs.py

Lines changed: 38 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,30 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
from __future__ import annotations
16+
17+
import functools
1518
import logging
1619
import os
1720

1821
from dotenv import load_dotenv
1922

20-
logger = logging.getLogger(__file__)
23+
from ...utils.env_utils import is_env_enabled
24+
25+
logger = logging.getLogger('google_adk.' + __name__)
26+
27+
_ADK_DISABLE_LOAD_DOTENV_ENV_VAR = 'ADK_DISABLE_LOAD_DOTENV'
28+
29+
30+
@functools.lru_cache(maxsize=1)
31+
def _get_explicit_env_keys() -> frozenset[str]:
32+
"""Returns env var keys set before ADK loads any `.env` files.
33+
34+
This snapshot is used to preserve user-provided environment variables while
35+
still allowing later `.env` files to override earlier ones via
36+
`override=True`.
37+
"""
38+
return frozenset(os.environ)
2139

2240

2341
def _walk_to_root_until_found(folder, filename) -> str:
@@ -35,15 +53,33 @@ def _walk_to_root_until_found(folder, filename) -> str:
3553
def load_dotenv_for_agent(
3654
agent_name: str, agent_parent_folder: str, filename: str = '.env'
3755
):
38-
"""Loads the .env file for the agent module."""
56+
"""Loads the `.env` file for the agent module.
57+
58+
Explicit environment variables (present before the first `.env` load) are
59+
preserved, while values loaded from `.env` may be overridden by later `.env`
60+
loads.
61+
"""
62+
if is_env_enabled(_ADK_DISABLE_LOAD_DOTENV_ENV_VAR):
63+
logger.info(
64+
'Skipping %s loading because %s is enabled.',
65+
filename,
66+
_ADK_DISABLE_LOAD_DOTENV_ENV_VAR,
67+
)
68+
return
3969

4070
# Gets the folder of agent_module as starting_folder
4171
starting_folder = os.path.abspath(
4272
os.path.join(agent_parent_folder, agent_name)
4373
)
4474
dotenv_file_path = _walk_to_root_until_found(starting_folder, filename)
4575
if dotenv_file_path:
76+
explicit_env_keys = _get_explicit_env_keys()
77+
explicit_env = {
78+
key: os.environ[key] for key in explicit_env_keys if key in os.environ
79+
}
80+
4681
load_dotenv(dotenv_file_path, override=True, verbose=True)
82+
os.environ.update(explicit_env)
4783
logger.info(
4884
'Loaded %s file for %s at %s',
4985
filename,

src/google/adk/models/lite_llm.py

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -584,9 +584,12 @@ async def _get_content(
584584
parts: Iterable[types.Part],
585585
*,
586586
provider: str = "",
587-
) -> Union[OpenAIMessageContent, str]:
587+
) -> OpenAIMessageContent:
588588
"""Converts a list of parts to litellm content.
589589
590+
Thought parts represent internal model reasoning and are always dropped so
591+
they are not replayed back to the model in subsequent turns.
592+
590593
Args:
591594
parts: The parts to convert.
592595
provider: The LLM provider name (e.g., "openai", "azure").
@@ -595,11 +598,25 @@ async def _get_content(
595598
The litellm content.
596599
"""
597600

601+
parts_without_thought = [part for part in parts if not part.thought]
602+
if len(parts_without_thought) == 1:
603+
part = parts_without_thought[0]
604+
if part.text:
605+
return part.text
606+
if (
607+
part.inline_data
608+
and part.inline_data.data
609+
and part.inline_data.mime_type
610+
and part.inline_data.mime_type.startswith("text/")
611+
):
612+
return _decode_inline_text_data(part.inline_data.data)
613+
598614
content_objects = []
599-
for part in parts:
615+
for part in parts_without_thought:
616+
# Skip thought parts to prevent reasoning from being replayed in subsequent
617+
# turns. Thought parts are internal model reasoning and should not be sent
618+
# back to the model.
600619
if part.text:
601-
if len(parts) == 1:
602-
return part.text
603620
content_objects.append({
604621
"type": "text",
605622
"text": part.text,
@@ -611,8 +628,6 @@ async def _get_content(
611628
):
612629
if part.inline_data.mime_type.startswith("text/"):
613630
decoded_text = _decode_inline_text_data(part.inline_data.data)
614-
if len(parts) == 1:
615-
return decoded_text
616631
content_objects.append({
617632
"type": "text",
618633
"text": decoded_text,

src/google/adk/sessions/database_session_service.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -178,12 +178,9 @@ async def _prepare_tables(self):
178178
self._db_schema_version = await conn.run_sync(
179179
_schema_check_utils.get_db_schema_version_from_connection
180180
)
181-
except Exception:
182-
# If inspection fails, assume the latest schema
183-
logger.warning(
184-
"Failed to inspect database tables, assuming the latest schema."
185-
)
186-
self._db_schema_version = _schema_check_utils.LATEST_SCHEMA_VERSION
181+
except Exception as e:
182+
logger.error("Failed to inspect database tables: %s", e)
183+
raise
187184

188185
# Check if tables are created and create them if not
189186
if self._tables_created:

src/google/adk/sessions/migration/_schema_check_utils.py

Lines changed: 52 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
import logging
1818

19+
from sqlalchemy import create_engine as create_sync_engine
1920
from sqlalchemy import inspect
2021
from sqlalchemy import text
2122

@@ -38,14 +39,16 @@ def _get_schema_version_impl(inspector, connection) -> str:
3839
if result:
3940
return result[0]
4041
else:
41-
return LATEST_SCHEMA_VERSION
42+
raise ValueError(
43+
"Schema version not found in adk_internal_metadata. The database"
44+
" might be malformed."
45+
)
4246
except Exception as e:
43-
logger.warning(
44-
"Failed to query schema version from adk_internal_metadata,"
45-
" assuming the latest schema: %s.",
47+
logger.error(
48+
"Failed to query schema version from adk_internal_metadata: %s.",
4649
e,
4750
)
48-
return LATEST_SCHEMA_VERSION
51+
raise
4952
# Metadata table doesn't exist, check for v0 schema.
5053
# V0 schema has an 'events' table with an 'actions' column.
5154
if inspector.has_table("events"):
@@ -57,17 +60,57 @@ def _get_schema_version_impl(inspector, connection) -> str:
5760
" serialize event actions. The v0 schema will not be supported"
5861
" going forward and will be deprecated in a few rollouts. Please"
5962
" migrate to the v1 schema which uses JSON serialization for event"
60-
" data. The migration command and script will be provided soon."
63+
" data. You can use `adk migrate session` command to migrate your"
64+
" database."
6165
)
6266
return SCHEMA_VERSION_0_PICKLE
6367
except Exception as e:
64-
logger.warning("Failed to inspect 'events' table columns: %s", e)
65-
return LATEST_SCHEMA_VERSION
66-
# New database, assume the latest schema.
68+
logger.error("Failed to inspect 'events' table columns: %s", e)
69+
raise
70+
# New database, use the latest schema.
6771
return LATEST_SCHEMA_VERSION
6872

6973

7074
def get_db_schema_version_from_connection(connection) -> str:
7175
"""Gets DB schema version from a DB connection."""
7276
inspector = inspect(connection)
7377
return _get_schema_version_impl(inspector, connection)
78+
79+
80+
def _to_sync_url(db_url: str) -> str:
81+
"""Removes '+driver' from SQLAlchemy URL."""
82+
if "://" in db_url:
83+
scheme, _, rest = db_url.partition("://")
84+
if "+" in scheme:
85+
dialect = scheme.split("+", 1)[0]
86+
return f"{dialect}://{rest}"
87+
return db_url
88+
89+
90+
def get_db_schema_version(db_url: str) -> str:
91+
"""Reads schema version from DB.
92+
93+
Checks metadata table first and then falls back to table structure.
94+
95+
Args:
96+
db_url: The database URL.
97+
98+
Returns:
99+
The detected schema version as a string. Returns `LATEST_SCHEMA_VERSION`
100+
if it's a new database.
101+
"""
102+
engine = None
103+
try:
104+
engine = create_sync_engine(_to_sync_url(db_url))
105+
with engine.connect() as connection:
106+
inspector = inspect(connection)
107+
return _get_schema_version_impl(inspector, connection)
108+
except Exception:
109+
logger.warning(
110+
"Failed to get schema version from database %s.",
111+
db_url,
112+
)
113+
raise
114+
finally:
115+
if engine:
116+
engine.dispose()

0 commit comments

Comments
 (0)