diff --git a/contributing/samples/bigquery/agent.py b/contributing/samples/bigquery/agent.py
index 3cd1eb99..c1b265c0 100644
--- a/contributing/samples/bigquery/agent.py
+++ b/contributing/samples/bigquery/agent.py
@@ -60,7 +60,7 @@ bigquery_toolset = BigQueryToolset(
 # debug CLI
 root_agent = llm_agent.Agent(
     model="gemini-2.0-flash",
-    name="hello_agent",
+    name="bigquery_agent",
     description=(
         "Agent to answer questions about BigQuery data and models and execute"
         " SQL queries."
diff --git a/contributing/samples/bigquery_agent/README.md b/contributing/samples/google_api/README.md
similarity index 50%
rename from contributing/samples/bigquery_agent/README.md
rename to contributing/samples/google_api/README.md
index c7dc7fd8..c1e6e8d4 100644
--- a/contributing/samples/bigquery_agent/README.md
+++ b/contributing/samples/google_api/README.md
@@ -1,45 +1,40 @@
-# BigQuery Sample
+# Google API Tools Sample
 
 ## Introduction
 
-This sample tests and demos the BigQuery support in ADK via two tools:
+This sample tests and demos Google API tools available in the
+`google.adk.tools.google_api_tool` module. We pick the following BigQuery API
+tools for this sample agent:
 
-* 1. bigquery_datasets_list:
+1. `bigquery_datasets_list`: List user's datasets.
 
-  List user's datasets.
+2. `bigquery_datasets_get`: Get a dataset's details.
 
-* 2. bigquery_datasets_get:
-  Get a dataset's details.
+3. `bigquery_datasets_insert`: Create a new dataset.
 
-* 3. bigquery_datasets_insert:
-  Create a new dataset.
+4. `bigquery_tables_list`: List all tables in a dataset.
 
-* 4. bigquery_tables_list:
-  List all tables in a dataset.
+5. `bigquery_tables_get`: Get a table's details.
 
-* 5. bigquery_tables_get:
-  Get a table's details.
-
-* 6. bigquery_tables_insert:
-  Insert a new table into a dataset.
+6. `bigquery_tables_insert`: Insert a new table into a dataset.
 
 ## How to use
 
-* 1. Follow https://developers.google.com/identity/protocols/oauth2#1.-obtain-oauth-2.0-credentials-from-the-dynamic_data.setvar.console_name. to get your client id and client secret.
+1. Follow https://developers.google.com/identity/protocols/oauth2#1.-obtain-oauth-2.0-credentials-from-the-dynamic_data.setvar.console_name. to get your client id and client secret.
 
    Be sure to choose "web" as your client type.
 
-* 2. Configure your `.env` file to add two variables:
+2. Configure your `.env` file to add two variables:
 
    * OAUTH_CLIENT_ID={your client id}
    * OAUTH_CLIENT_SECRET={your client secret}
 
   Note: don't create a separate `.env` file , instead put it to the same `.env` file that stores your Vertex AI or Dev ML credentials
 
-* 3. Follow https://developers.google.com/identity/protocols/oauth2/web-server#creatingcred to add http://localhost/dev-ui/ to "Authorized redirect URIs".
+3. Follow https://developers.google.com/identity/protocols/oauth2/web-server#creatingcred to add http://localhost/dev-ui/ to "Authorized redirect URIs".
 
   Note: localhost here is just a hostname that you use to access the dev ui, replace it with the actual hostname you use to access the dev ui.
 
-* 4. For 1st run, allow popup for localhost in Chrome.
+4. For 1st run, allow popup for localhost in Chrome.
 
 ## Sample prompt
diff --git a/contributing/samples/bigquery_agent/__init__.py b/contributing/samples/google_api/__init__.py
similarity index 100%
rename from contributing/samples/bigquery_agent/__init__.py
rename to contributing/samples/google_api/__init__.py
diff --git a/contributing/samples/bigquery_agent/agent.py b/contributing/samples/google_api/agent.py
similarity index 98%
rename from contributing/samples/bigquery_agent/agent.py
rename to contributing/samples/google_api/agent.py
index 976cea17..1cdbab9c 100644
--- a/contributing/samples/bigquery_agent/agent.py
+++ b/contributing/samples/google_api/agent.py
@@ -40,7 +40,7 @@ bigquery_toolset = BigQueryToolset(
 
 root_agent = Agent(
     model="gemini-2.0-flash",
-    name="bigquery_agent",
+    name="google_api_bigquery_agent",
     instruction="""
       You are a helpful Google BigQuery agent that help to manage users' data on Google BigQuery.
       Use the provided tools to conduct various operations on users' data in Google BigQuery.