Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
79 changes: 60 additions & 19 deletions google/genai/_live_converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,10 @@ def _FunctionDeclaration_to_vertex(
)

if getv(from_object, ['behavior']) is not None:
raise ValueError('behavior parameter is not supported in Vertex AI.')
raise ValueError(
'behavior parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object

Expand Down Expand Up @@ -317,7 +320,8 @@ def _GenerationConfig_to_vertex(

if getv(from_object, ['enable_enhanced_civic_answers']) is not None:
raise ValueError(
'enable_enhanced_civic_answers parameter is not supported in Vertex AI.'
'enable_enhanced_civic_answers parameter is not supported in Gemini'
' Enterprise Agent Platform.'
)

return to_object
Expand Down Expand Up @@ -548,7 +552,8 @@ def _LiveClientRealtimeInput_to_vertex(

if getv(from_object, ['audio_stream_end']) is not None:
raise ValueError(
'audio_stream_end parameter is not supported in Vertex AI.'
'audio_stream_end parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['video']) is not None:
Expand Down Expand Up @@ -738,7 +743,10 @@ def _LiveClientSetup_to_vertex(
)

if getv(from_object, ['history_config']) is not None:
raise ValueError('history_config parameter is not supported in Vertex AI.')
raise ValueError(
'history_config parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['avatar_config']) is not None:
setv(to_object, ['avatarConfig'], getv(from_object, ['avatar_config']))
Expand Down Expand Up @@ -1094,7 +1102,10 @@ def _LiveConnectConfig_to_vertex(
)

if getv(from_object, ['history_config']) is not None:
raise ValueError('history_config parameter is not supported in Vertex AI.')
raise ValueError(
'history_config parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['avatar_config']) is not None:
setv(
Expand Down Expand Up @@ -1169,19 +1180,26 @@ def _LiveMusicClientMessage_to_vertex(
) -> dict[str, Any]:
to_object: dict[str, Any] = {}
if getv(from_object, ['setup']) is not None:
raise ValueError('setup parameter is not supported in Vertex AI.')
raise ValueError(
'setup parameter is not supported in Gemini Enterprise Agent Platform.'
)

if getv(from_object, ['client_content']) is not None:
raise ValueError('client_content parameter is not supported in Vertex AI.')
raise ValueError(
'client_content parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['music_generation_config']) is not None:
raise ValueError(
'music_generation_config parameter is not supported in Vertex AI.'
'music_generation_config parameter is not supported in Gemini'
' Enterprise Agent Platform.'
)

if getv(from_object, ['playback_control']) is not None:
raise ValueError(
'playback_control parameter is not supported in Vertex AI.'
'playback_control parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object
Expand All @@ -1204,7 +1222,9 @@ def _LiveMusicConnectParameters_to_vertex(
) -> dict[str, Any]:
to_object: dict[str, Any] = {}
if getv(from_object, ['model']) is not None:
raise ValueError('model parameter is not supported in Vertex AI.')
raise ValueError(
'model parameter is not supported in Gemini Enterprise Agent Platform.'
)

return to_object

Expand All @@ -1231,7 +1251,8 @@ def _LiveMusicSetConfigParameters_to_vertex(
to_object: dict[str, Any] = {}
if getv(from_object, ['music_generation_config']) is not None:
raise ValueError(
'music_generation_config parameter is not supported in Vertex AI.'
'music_generation_config parameter is not supported in Gemini'
' Enterprise Agent Platform.'
)

return to_object
Expand Down Expand Up @@ -1259,7 +1280,8 @@ def _LiveMusicSetWeightedPromptsParameters_to_vertex(
to_object: dict[str, Any] = {}
if getv(from_object, ['weighted_prompts']) is not None:
raise ValueError(
'weighted_prompts parameter is not supported in Vertex AI.'
'weighted_prompts parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object
Expand Down Expand Up @@ -1601,13 +1623,22 @@ def _Part_to_vertex(
setv(to_object, ['videoMetadata'], getv(from_object, ['video_metadata']))

if getv(from_object, ['tool_call']) is not None:
raise ValueError('tool_call parameter is not supported in Vertex AI.')
raise ValueError(
'tool_call parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['tool_response']) is not None:
raise ValueError('tool_response parameter is not supported in Vertex AI.')
raise ValueError(
'tool_response parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['part_metadata']) is not None:
raise ValueError('part_metadata parameter is not supported in Vertex AI.')
raise ValueError(
'part_metadata parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object

Expand All @@ -1628,11 +1659,15 @@ def _ReplicatedVoiceConfig_to_vertex(
)

if getv(from_object, ['consent_audio']) is not None:
raise ValueError('consent_audio parameter is not supported in Vertex AI.')
raise ValueError(
'consent_audio parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['voice_consent_signature']) is not None:
raise ValueError(
'voice_consent_signature parameter is not supported in Vertex AI.'
'voice_consent_signature parameter is not supported in Gemini'
' Enterprise Agent Platform.'
)

return to_object
Expand Down Expand Up @@ -1794,7 +1829,10 @@ def _Tool_to_vertex(
setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

if getv(from_object, ['file_search']) is not None:
raise ValueError('file_search parameter is not supported in Vertex AI.')
raise ValueError(
'file_search parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['google_search']) is not None:
setv(to_object, ['googleSearch'], getv(from_object, ['google_search']))
Expand Down Expand Up @@ -1840,7 +1878,10 @@ def _Tool_to_vertex(
setv(to_object, ['urlContext'], getv(from_object, ['url_context']))

if getv(from_object, ['mcp_servers']) is not None:
raise ValueError('mcp_servers parameter is not supported in Vertex AI.')
raise ValueError(
'mcp_servers parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object

Expand Down
4 changes: 3 additions & 1 deletion google/genai/_tokens_converters.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,9 @@ def _CreateAuthTokenParameters_to_vertex(
) -> dict[str, Any]:
to_object: dict[str, Any] = {}
if getv(from_object, ['config']) is not None:
raise ValueError('config parameter is not supported in Vertex AI.')
raise ValueError(
'config parameter is not supported in Gemini Enterprise Agent Platform.'
)

return to_object

Expand Down
4 changes: 2 additions & 2 deletions google/genai/_transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1025,8 +1025,8 @@ def t_batch_job_source(
if client.vertexai:
if mldev_sources or vertex_sources != 1:
raise ValueError(
'Exactly one of `gcs_uri`, `bigquery_uri`, or `vertex_dataset_name` must be set, other '
'sources are not supported in Vertex AI.'
'Exactly one of `gcs_uri`, `bigquery_uri`, or `vertex_dataset_name` must be set, other '
'sources are not supported in Gemini Enterprise Agent Platform.'
)
else:
if vertex_sources or mldev_sources != 1:
Expand Down
63 changes: 42 additions & 21 deletions google/genai/batches.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,17 +165,21 @@ def _BatchJobDestination_to_vertex(
)

if getv(from_object, ['file_name']) is not None:
raise ValueError('file_name parameter is not supported in Vertex AI.')
raise ValueError(
'file_name parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['inlined_responses']) is not None:
raise ValueError(
'inlined_responses parameter is not supported in Vertex AI.'
'inlined_responses parameter is not supported in Gemini Enterprise'
' Agent Platform.'
)

if getv(from_object, ['inlined_embed_content_responses']) is not None:
raise ValueError(
'inlined_embed_content_responses parameter is not supported in'
' Vertex AI.'
'inlined_embed_content_responses parameter is not supported in Gemini'
' Enterprise Agent Platform.'
)

if getv(from_object, ['vertex_dataset']) is not None:
Expand Down Expand Up @@ -276,11 +280,15 @@ def _BatchJobSource_to_vertex(
)

if getv(from_object, ['file_name']) is not None:
raise ValueError('file_name parameter is not supported in Vertex AI.')
raise ValueError(
'file_name parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['inlined_requests']) is not None:
raise ValueError(
'inlined_requests parameter is not supported in Vertex AI.'
'inlined_requests parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

if getv(from_object, ['vertex_dataset_name']) is not None:
Expand Down Expand Up @@ -595,7 +603,10 @@ def _CreateBatchJobConfig_to_vertex(
)

if getv(from_object, ['webhook_config']) is not None:
raise ValueError('webhook_config parameter is not supported in Vertex AI.')
raise ValueError(
'webhook_config parameter is not supported in Gemini Enterprise Agent'
' Platform.'
)

return to_object

Expand Down Expand Up @@ -1847,8 +1858,9 @@ def get(
Args:
name (str): A fully-qualified BatchJob resource name or ID.
Example: "projects/.../locations/.../batchPredictionJobs/456" or "456"
when project and location are initialized in the Vertex AI client. Or
"batches/abc" using the Gemini Developer AI client.
when project and location are initialized in the Gemini Enterprise
Agent Platform client. Or "batches/abc" using the Gemini Developer API
client.

Returns:
A BatchJob object that contains details about the batch job.
Expand Down Expand Up @@ -2184,10 +2196,11 @@ def create(

Args:
model (str): The model to use for the batch job.
src: The source of the batch job. Currently Vertex AI supports GCS URI(-s)
or BigQuery URI. Example: "gs://path/to/input/data" or
"bq://projectId.bqDatasetId.bqTableId". Gemini Developer API supports
List of inlined_request, or file name. Example: "files/file_name".
src: The source of the batch job. Currently Gemini Enterprise Agent
Platform supports GCS URI(-s) or BigQuery URI. Example:
"gs://path/to/input/data" or "bq://projectId.bqDatasetId.bqTableId".
Gemini Developer API supports a list of inlined_requests, or a file name.
Example: "files/file_name".
config (CreateBatchJobConfig): Optional configuration for the batch job.

Returns:
Expand Down Expand Up @@ -2264,7 +2277,10 @@ def create_embeddings(
)

if self._api_client.vertexai:
raise ValueError('Vertex AI does not support batches.create_embeddings.')
raise ValueError(
'Gemini Enterprise Agent Platform (previously known as Vertex AI)'
' does not support batches.create_embeddings.'
)
else:
return self._create_embeddings(model=model, src=src, config=config)

Expand Down Expand Up @@ -2465,8 +2481,9 @@ async def get(
Args:
name (str): A fully-qualified BatchJob resource name or ID.
Example: "projects/.../locations/.../batchPredictionJobs/456" or "456"
when project and location are initialized in the Vertex AI client. Or
"batches/abc" using the Gemini Developer AI client.
when project and location are initialized in the Gemini Enterprise
Agent Platform client. Or "batches/abc" using the Gemini Developer API
client.

Returns:
A BatchJob object that contains details about the batch job.
Expand Down Expand Up @@ -2808,10 +2825,11 @@ async def create(

Args:
model (str): The model to use for the batch job.
src: The source of the batch job. Currently Vertex AI supports GCS URI(-s)
or BigQuery URI. Example: "gs://path/to/input/data" or
"bq://projectId.bqDatasetId.bqTableId". Gemini Develop API supports List
of inlined_request, or file name. Example: "files/file_name".
src: The source of the batch job. Currently Gemini Enterprise Agent
Platform supports GCS URI(-s) or BigQuery URI. Example:
"gs://path/to/input/data" or "bq://projectId.bqDatasetId.bqTableId".
Gemini Developer API supports a list of inlined_requests, or a file name.
Example: "files/file_name".
config (CreateBatchJobConfig): Optional configuration for the batch job.

Returns:
Expand Down Expand Up @@ -2894,7 +2912,10 @@ async def create_embeddings(
http_options = parameter_model.config.http_options

if self._api_client.vertexai:
raise ValueError('Vertex AI does not support batches.create_embeddings.')
raise ValueError(
'Gemini Enterprise Agent Platform (previously known as Vertex AI)'
' does not support batches.create_embeddings.'
)
else:
return await self._create_embeddings(model=model, src=src, config=config)

Expand Down
Loading
Loading