Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions google/genai/_extra_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,9 +141,12 @@ def find_afc_incompatible_tool_indexes(
return incompatible_tools_indexes

for index, tool in enumerate(config_model.tools):
if isinstance(tool, types.Tool) and tool.function_declarations:
if not isinstance(tool, types.Tool):
continue
if tool.function_declarations:
incompatible_tools_indexes.append(index)
if tool.mcp_servers:
incompatible_tools_indexes.append(index)

return incompatible_tools_indexes


Expand Down
10 changes: 10 additions & 0 deletions google/genai/_live_converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -1397,6 +1397,13 @@ def _Tool_to_mldev(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
setv(
to_object,
['mcpServers'],
[item for item in getv(from_object, ['mcp_servers'])],
)

return to_object


Expand Down Expand Up @@ -1450,6 +1457,9 @@ def _Tool_to_vertex(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
raise ValueError('mcp_servers parameter is not supported in Vertex AI.')

return to_object


Expand Down
7 changes: 7 additions & 0 deletions google/genai/_tokens_converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -522,4 +522,11 @@ def _Tool_to_mldev(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
setv(
to_object,
['mcpServers'],
[item for item in getv(from_object, ['mcp_servers'])],
)

return to_object
7 changes: 7 additions & 0 deletions google/genai/batches.py
Original file line number Diff line number Diff line change
Expand Up @@ -1498,6 +1498,13 @@ def _Tool_to_mldev(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
setv(
to_object,
['mcpServers'],
[item for item in getv(from_object, ['mcp_servers'])],
)

return to_object


Expand Down
10 changes: 10 additions & 0 deletions google/genai/caches.py
Original file line number Diff line number Diff line change
Expand Up @@ -704,6 +704,13 @@ def _Tool_to_mldev(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
setv(
to_object,
['mcpServers'],
[item for item in getv(from_object, ['mcp_servers'])],
)

return to_object


Expand Down Expand Up @@ -757,6 +764,9 @@ def _Tool_to_vertex(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
raise ValueError('mcp_servers parameter is not supported in Vertex AI.')

return to_object


Expand Down
16 changes: 13 additions & 3 deletions google/genai/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3770,6 +3770,13 @@ def _Tool_to_mldev(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
setv(
to_object,
['mcpServers'],
[item for item in getv(from_object, ['mcp_servers'])],
)

return to_object


Expand Down Expand Up @@ -3824,6 +3831,9 @@ def _Tool_to_vertex(
if getv(from_object, ['url_context']) is not None:
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
raise ValueError('mcp_servers parameter is not supported in Vertex AI.')

return to_object


Expand Down Expand Up @@ -5600,7 +5610,7 @@ def generate_content(
'Tools at indices [%s] are not compatible with automatic function '
'calling (AFC). AFC is disabled. If AFC is intended, please '
'include python callables in the tool list, and do not include '
'function declaration in the tool list.',
'function declaration and MCP server in the tool list.',
indices_str,
)
return self._generate_content(
Expand Down Expand Up @@ -7469,7 +7479,7 @@ async def generate_content(
'Tools at indices [%s] are not compatible with automatic function '
'calling (AFC). AFC is disabled. If AFC is intended, please '
'include python callables in the tool list, and do not include '
'function declaration in the tool list.',
'function declaration and MCP server in the tool list.',
indices_str,
)
return await self._generate_content(
Expand Down Expand Up @@ -7633,7 +7643,7 @@ async def base_async_generator(model, contents, config): # type: ignore[no-unty
'Tools at indices [%s] are not compatible with automatic function '
'calling (AFC). AFC is disabled. If AFC is intended, please '
'include python callables in the tool list, and do not include '
'function declaration in the tool list.',
'function declaration and MCP server in the tool list.',
indices_str,
)
response = await self._generate_content_stream(
Expand Down
32 changes: 30 additions & 2 deletions google/genai/tests/afc/test_find_afc_incompatible_tool_indexes.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ def test_empty_tools_list_returns_empty_list():

def test_all_compatible_tools_returns_empty_list_with_empty_fd():
"""Verifies that an empty list is returned when all tools are compatible.

A tool is compatible if it's not a `types.Tool` or if its
`function_declarations` attribute is empty or None from config.
"""
Expand Down Expand Up @@ -118,6 +119,7 @@ def test_all_compatible_tools_returns_empty_list_with_empty_fd():

def test_all_compatible_tools_returns_empty_list_with_none_fd():
"""Verifies that an empty list is returned when all tools are compatible.

A tool is compatible if it's not a `types.Tool` or if its
`function_declarations` attribute is empty or None from config.
"""
Expand Down Expand Up @@ -153,6 +155,7 @@ def test_all_compatible_tools_returns_empty_list_with_none_fd():

def test_all_compatible_tools_returns_empty_list():
"""Verifies that an empty list is returned when all tools are compatible.

A tool is compatible if it's not a `types.Tool` or if its
`function_declarations` attribute is empty or None from config.
"""
Expand Down Expand Up @@ -211,8 +214,7 @@ def test_single_incompatible_tool():


def test_multiple_incompatible_tools():
"""Verifies that all correct indexes are returned for multiple incompatible
tools. """
"""Verifies correct indexes are returned for multiple incompatible tools."""
result = find_afc_incompatible_tool_indexes(
config=types.GenerateContentConfig(
tools=[
Expand All @@ -238,3 +240,29 @@ def test_multiple_incompatible_tools():
)
)
assert result == [2, 5]

def test_mcp_tool_incompatible():
  """Verifies tools carrying `mcp_servers` are reported as AFC-incompatible.

  Mirrors `test_multiple_incompatible_tools`: tools with
  `function_declarations` (index 2) or `mcp_servers` (index 6) are
  incompatible with automatic function calling (AFC); the remaining tools
  (search retrieval, retrieval, code execution, python callables and the
  MCP adapter) are compatible and must not be flagged.
  """
  result = find_afc_incompatible_tool_indexes(
      config=types.GenerateContentConfig(
          tools=[
              types.Tool(
                  google_search_retrieval=types.GoogleSearchRetrieval()
              ),
              types.Tool(retrieval=types.Retrieval()),
              # Index 2: explicit function declarations are AFC-incompatible.
              types.Tool(
                  function_declarations=[
                      types.FunctionDeclaration(name='test_function')
                  ]
              ),
              types.Tool(code_execution=types.ToolCodeExecution()),
              # Python callables / adapters are AFC-compatible.
              get_weather_tool,
              mcp_to_genai_tool_adapter,
              # Index 6: server-side MCP servers are AFC-incompatible.
              types.Tool(
                  mcp_servers=[types.McpServer(name='test_mcp_server')]
              ),
          ]
      )
  )
  assert result == [2, 6]
103 changes: 100 additions & 3 deletions google/genai/tests/chats/test_send_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -768,9 +768,8 @@ def test_mcp_tools(client):
)
],},
)
response = chat.send_message('What is the weather in Boston?');
response = chat.send_message('What is the weather in San Francisco?');

response = chat.send_message('What is the weather in Boston?')
response = chat.send_message('What is the weather in San Francisco?')


def test_mcp_tools_stream(client):
Expand Down Expand Up @@ -842,3 +841,101 @@ async def test_async_mcp_tools_stream(client):
'What is the weather in San Francisco?'
):
pass


def test_server_side_mcp_tools(client):
  """Chat `send_message` with a server-side MCP tool (dict-style config).

  Vertex AI does not support the `mcp_servers` field, so on a Vertex client
  the call is expected to raise ValueError (hence `exception_if_vertex`).
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    chat = client.chats.create(
        model='gemini-2.5-flash',
        config={
            'tools': [
                {
                    'mcp_servers': [
                        {
                            'name': 'weather_server',
                            'streamable_http_transport': {
                                'url': (
                                    'https://gemini-api-demos.uc.r.appspot.com/mcp'
                                ),
                                # NOTE(review): placeholder token — presumably
                                # a recorded/replay test; confirm no live
                                # credential is required.
                                'headers': {
                                    'AUTHORIZATION': 'Bearer github_pat_XXXX',
                                },
                                'timeout': '10s',
                            },
                        },
                    ],
                },
            ],
        },
    )
    # Two turns: exercises the MCP tool config across chat-history round-trips.
    response = chat.send_message('What is the weather in Boston on 02/02/2026?')
    response = chat.send_message(
        'What is the weather in San Francisco on 02/02/2026?'
    )


def test_server_side_mcp_tools_stream(client):
  """Streaming chat `send_message_stream` with a server-side MCP tool.

  Vertex AI does not support `mcp_servers`, so a ValueError is expected
  there (`exception_if_vertex`). Draining each stream with `pass` only
  verifies that no exception is raised; chunk contents are not asserted.
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    chat = client.chats.create(
        model='gemini-2.5-flash',
        config={
            'tools': [
                {
                    'mcp_servers': [
                        {
                            'name': 'weather_server',
                            'streamable_http_transport': {
                                'url': (
                                    'https://gemini-api-demos.uc.r.appspot.com/mcp'
                                ),
                                # NOTE(review): placeholder token — confirm the
                                # test runs against recorded traffic.
                                'headers': {
                                    'AUTHORIZATION': 'Bearer github_pat_XXXX',
                                },
                                'timeout': '10s',
                            },
                        },
                    ],
                },
            ],
        },
    )
    for chunk in chat.send_message_stream(
        'What is the weather in Boston on 02/02/2026?'
    ):
      pass
    for chunk in chat.send_message_stream(
        'What is the weather in San Francisco on 02/02/2026?'
    ):
      pass


@pytest.mark.asyncio
async def test_async_server_side_mcp_tools(client):
  """Async chat `send_message` with a server-side MCP tool.

  Async counterpart of `test_server_side_mcp_tools`; Vertex AI is expected
  to raise ValueError for `mcp_servers` (`exception_if_vertex`).
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    chat = client.aio.chats.create(
        model='gemini-2.5-flash',
        config={
            'tools': [
                {
                    'mcp_servers': [
                        {
                            'name': 'weather_server',
                            'streamable_http_transport': {
                                'url': (
                                    'https://gemini-api-demos.uc.r.appspot.com/mcp'
                                ),
                                # NOTE(review): placeholder token — confirm the
                                # test runs against recorded traffic.
                                'headers': {
                                    'AUTHORIZATION': 'Bearer github_pat_XXXX',
                                },
                                'timeout': '10s',
                            },
                        },
                    ],
                },
            ],
        },
    )
    # Two turns: exercises the MCP tool config across chat-history round-trips.
    await chat.send_message('What is the weather in Boston on 02/02/2026?')
    await chat.send_message(
        'What is the weather in San Francisco on 02/02/2026?'
    )
65 changes: 65 additions & 0 deletions google/genai/tests/models/test_generate_content_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -1759,3 +1759,68 @@ async def test_function_declaration_with_callable_async_stream(client):
},
):
pass

def test_server_side_mcp_only(client):
  """Test server side mcp, happy path.

  `generate_content` with a typed `types.Tool(mcp_servers=...)` config
  pointing at a demo MCP weather server over streamable HTTP. On Vertex AI
  the `mcp_servers` field raises ValueError instead (`exception_if_vertex`).
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    response = client.models.generate_content(
        model='gemini-2.5-pro',
        contents=('What is the weather like in New York (NY) on 02/02/2026?'),
        config=types.GenerateContentConfig(
            tools=[types.Tool(
                mcp_servers=[types.McpServer(
                    name='get_weather',
                    streamable_http_transport=types.StreamableHttpTransport(
                        url='https://gemini-api-demos.uc.r.appspot.com/mcp',
                        # NOTE(review): placeholder token — presumably a
                        # recorded/replay test; confirm.
                        headers={'AUTHORIZATION': 'Bearer github_pat_XXXX'},
                    ),
                )]
            )]
        )
    )
    # Only asserts a non-empty text reply; the tool call itself is implicit.
    assert response.text

@pytest.mark.asyncio
async def test_server_side_mcp_only_async(client):
  """Test server side mcp, happy path.

  Async counterpart of `test_server_side_mcp_only`, via
  `client.aio.models.generate_content`. Vertex AI is expected to raise
  ValueError for `mcp_servers` (`exception_if_vertex`).
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    response = await client.aio.models.generate_content(
        model='gemini-2.5-flash',
        contents=(
            'What is the weather like in New York on 02/02/2026?'
        ),
        config=types.GenerateContentConfig(
            tools=[types.Tool(
                mcp_servers=[types.McpServer(
                    name='get_weather',
                    streamable_http_transport=types.StreamableHttpTransport(
                        url='https://gemini-api-demos.uc.r.appspot.com/mcp',
                        # NOTE(review): placeholder token — presumably a
                        # recorded/replay test; confirm.
                        headers={'AUTHORIZATION': 'Bearer github_pat_XXXX'},
                    ),
                )]

            )]
        )
    )
    # Only asserts a non-empty text reply; the tool call itself is implicit.
    assert response.text

def test_server_side_mcp_only_stream(client):
  """Test server side mcp, happy path.

  Streaming counterpart of `test_server_side_mcp_only`. Draining the
  stream with `pass` only checks that iteration completes without raising;
  chunk contents are not asserted.
  """
  with pytest_helper.exception_if_vertex(client, ValueError):
    response = client.models.generate_content_stream(
        model='gemini-2.5-pro',
        contents=('What is the weather like in New York (NY) on 02/02/2026?'),
        config=types.GenerateContentConfig(
            tools=[types.Tool(
                mcp_servers=[types.McpServer(
                    name='get_weather',
                    streamable_http_transport=types.StreamableHttpTransport(
                        url='https://gemini-api-demos.uc.r.appspot.com/mcp',
                        # NOTE(review): placeholder token — presumably a
                        # recorded/replay test; confirm.
                        headers={'AUTHORIZATION': 'Bearer github_pat_XXXX'},
                    ),
                )]
            )]
        )
    )
    for chunk in response:
      pass
Loading