You've already forked adk-python
mirror of
https://github.com/encounter/adk-python.git
synced 2026-03-30 10:57:20 -07:00
chore: Update default and example Gemini models to new versions
Changes references from `gemini-1.5-flash` and `gemini-1.5-pro` to `gemini-2.5-flash` and `gemini-2.5-pro` in docstrings, default values, sample agents, and tests. PiperOrigin-RevId: 805536434
This commit is contained in:
committed by
Copybara-Service
parent
3c433b7168
commit
894bec794e
@@ -62,7 +62,7 @@ bigtable_toolset = BigtableToolset(
 # The variable name `root_agent` determines what your root agent is for the
 # debug CLI
 root_agent = LlmAgent(
-    model="gemini-1.5-flash",
+    model="gemini-2.5-flash",
     name="bigtable_agent",
     description=(
         "Agent to answer questions about Bigtable database tables and"
@@ -32,7 +32,7 @@ class BaseLlm(BaseModel):
   """The BaseLLM class.

   Attributes:
-    model: The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001.
+    model: The name of the LLM, e.g. gemini-2.5-flash or gemini-2.5-pro.
   """

   model_config = ConfigDict(
@@ -42,7 +42,7 @@ class BaseLlm(BaseModel):
   """The pydantic model config."""

   model: str
-  """The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001."""
+  """The name of the LLM, e.g. gemini-2.5-flash or gemini-2.5-pro."""

   @classmethod
   def supported_models(cls) -> list[str]:
@@ -58,7 +58,7 @@ class Gemini(BaseLlm):
     model: The name of the Gemini model.
   """

-  model: str = 'gemini-1.5-flash'
+  model: str = 'gemini-2.5-flash'

   retry_options: Optional[types.HttpRetryOptions] = None
   """Allow Gemini to retry failed responses.
@@ -683,7 +683,7 @@ def _is_litellm_gemini_model(model_string: str) -> bool:

   Args:
     model_string: A LiteLLM model string (e.g., "gemini/gemini-2.5-pro" or
-      "vertex_ai/gemini-1.5-flash")
+      "vertex_ai/gemini-2.5-flash")

   Returns:
     True if it's a Gemini model accessed via LiteLLM, False otherwise
Reference in New Issue
Block a user