Commit 16ebde7

feat(genai): model and SDK updates for 2025 May (#13381)
* feat(genai): update to use latest SDK version
* feat(genai): update to use global endpoint as location
* fix(genai): copyright year (all genai samples were created in 2025)
* feat(genai): update to use gemini 2.5 flash
* fix(genai): lint error
* fix(genai): error
* fix(genai): lint error
* fix(genai): No global endpoint support for Model optimiser
* fix(genai): rollback model updates for function calling
1 parent 1013b37 commit 16ebde7

File tree: 67 files changed, +83 -82 lines changed
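Taken together, the diffs below follow one pattern: pin google-genai to 1.16.1, route the samples through the global endpoint via GOOGLE_CLOUD_LOCATION, and move the default model from gemini-2.0-flash-001 to gemini-2.5-flash-preview-05-20. A minimal sketch of what an updated sample looks like after this commit (illustrative only, assembled from snippets in the diffs rather than copied from any single file):

import os

from google import genai
from google.genai.types import HttpOptions

# Environment used by the genai samples and their tests.
os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # previously "us-central1"
# os.environ["GOOGLE_CLOUD_PROJECT"] = "add-your-project-name"

client = genai.Client(http_options=HttpOptions(api_version="v1"))
response = client.models.generate_content(
    model="gemini-2.5-flash-preview-05-20",  # previously "gemini-2.0-flash-001"
    contents="Why is the sky blue?",
)
print(response.text)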

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/bounding_box/boundingbox_with_txt_img.py
Lines changed: 1 addition & 1 deletion

@@ -94,7 +94,7 @@ def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> None:
     image_uri = "https://storage.googleapis.com/generativeai-downloads/images/socks.jpg"

     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=[
             Part.from_uri(
                 file_uri=image_uri,

genai/bounding_box/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
-google-genai==1.7.0
+google-genai==1.16.1
 pillow==11.1.0

genai/bounding_box/test_bounding_box_examples.py
Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
 import boundingbox_with_txt_img

 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

genai/content_cache/contentcache_create_with_txt_gcs_pdf.py
Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ def create_content_cache() -> str:
     ]

     content_cache = client.caches.create(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         config=CreateCachedContentConfig(
             contents=contents,
             system_instruction=system_instruction,

genai/content_cache/contentcache_use_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ def generate_content(cache_name: str) -> str:
     # Use content cache to generate text response
     # E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="Summarize the pdfs",
         config=GenerateContentConfig(
             cached_content=cache_name,

genai/content_cache/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/controlled_generation/ctrlgen_with_class_schema.py
Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@ class Recipe(BaseModel):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="List a few popular cookie recipes.",
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_enum_class_schema.py
Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ class InstrumentClass(enum.Enum):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="What type of instrument is a guitar?",
         config={
             "response_mime_type": "text/x.enum",

genai/controlled_generation/ctrlgen_with_enum_schema.py
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="What type of instrument is an oboe?",
         config=GenerateContentConfig(
             response_mime_type="text/x.enum",

genai/controlled_generation/ctrlgen_with_nested_class_schema.py
Lines changed: 1 addition & 1 deletion

@@ -36,7 +36,7 @@ class Recipe(BaseModel):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="List about 10 home-baked cookies and give them grades based on tastiness.",
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_nullable_schema.py
Lines changed: 1 addition & 1 deletion

@@ -51,7 +51,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=prompt,
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_resp_schema.py
Lines changed: 1 addition & 1 deletion

@@ -36,7 +36,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=prompt,
         config={
             "response_mime_type": "application/json",

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/controlled_generation/test_controlled_generation_examples.py
Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@
 import ctrlgen_with_resp_schema

 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

genai/count_tokens/counttoken_compute_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def compute_tokens_example() -> int:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.compute_tokens(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="What's the longest word in the English language?",
     )

genai/count_tokens/counttoken_resp_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@ def count_tokens_example() -> int:

     # Send text to Gemini
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001", contents=prompt
+        model="gemini-2.5-flash-preview-05-20", contents=prompt
     )

     # Prompt and response tokens count

genai/count_tokens/counttoken_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def count_tokens() -> int:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.count_tokens(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="What's the highest mountain in Africa?",
     )
     print(response)

genai/count_tokens/counttoken_with_txt_vid.py
Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ def count_tokens() -> int:
     ]

     response = client.models.count_tokens(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=contents,
     )
     print(response)

genai/count_tokens/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/count_tokens/test_count_tokens_examples.py
Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@
 import counttoken_with_txt_vid

 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

genai/embeddings/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/express_mode/api_key_example.py
Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ def generate_content() -> str:
     client = genai.Client(vertexai=True, api_key=API_KEY)

     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="Explain bubble sort to me.",
     )

genai/express_mode/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/express_mode/test_express_mode_examples.py
Lines changed: 1 addition & 1 deletion

@@ -40,7 +40,7 @@ def test_api_key_example(mock_genai_client: MagicMock) -> None:

     mock_genai_client.assert_called_once_with(vertexai=True, api_key="YOUR_API_KEY")
     mock_genai_client.return_value.models.generate_content.assert_called_once_with(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="Explain bubble sort to me.",
     )
     assert response == "This is a mocked bubble sort explanation."

Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
-google-genai==1.11.0
+google-genai==1.16.1
 pillow==11.1.0

genai/live/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.10.0
+google-genai==1.16.1

genai/provisioned_throughput/provisionedthroughput_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -31,7 +31,7 @@ def generate_content() -> str:
         )
     )
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="How does AI work?",
     )
     print(response.text)

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/safety/requirements.txt
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/safety/safety_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -54,7 +54,7 @@ def generate_content() -> GenerateContentResponse:
     ]

     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=prompt,
         config=GenerateContentConfig(
             system_instruction=system_instruction,

genai/safety/test_safety_examples.py
Lines changed: 1 addition & 1 deletion

@@ -22,7 +22,7 @@


 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.7.0
+google-genai==1.16.1

genai/template_folder/templatefolder_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

genai/template_folder/test_templatefolder_examples.py
Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -16,7 +16,7 @@
 import templatefolder_with_txt

 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-google-genai==1.12.1
+google-genai==1.16.1

genai/text_generation/test_text_generation_examples.py
Lines changed: 3 additions & 1 deletion

@@ -39,7 +39,7 @@
 import thinking_textgen_with_txt

 os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
-os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
+os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
 # The project name is included in the CICD pipeline
 # os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"

@@ -138,5 +138,7 @@ def test_textgen_with_youtube_video() -> None:


 def test_model_optimizer_textgen_with_txt() -> None:
+    os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
     response = model_optimizer_textgen_with_txt.generate_content()
+    os.environ["GOOGLE_CLOUD_LOCATION"] = "global"  # "us-central1"
     assert response
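Per the commit message, the model optimizer sample has no global endpoint support yet, so this test pins GOOGLE_CLOUD_LOCATION back to us-central1 for one call and then restores "global" by hand. An alternative sketch (not part of this commit, shown only to illustrate the design choice) would scope the override with pytest's built-in monkeypatch fixture, which restores the previous value even if the call raises:

def test_model_optimizer_textgen_with_txt(monkeypatch) -> None:
    # Model optimizer is not served from the global endpoint, so pin a region;
    # monkeypatch undoes the env change automatically after the test.
    monkeypatch.setenv("GOOGLE_CLOUD_LOCATION", "us-central1")
    response = model_optimizer_textgen_with_txt.generate_content()
    assert response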

genai/text_generation/textgen_async_with_txt.py
Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -21,7 +21,7 @@ async def generate_content() -> str:
     from google.genai.types import GenerateContentConfig, HttpOptions

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.0-flash-001"
+    model_id = "gemini-2.5-flash-preview-05-20"

     response = await client.aio.models.generate_content(
         model=model_id,

genai/text_generation/textgen_chat_stream_with_txt.py
Lines changed: 3 additions & 5 deletions

@@ -13,25 +13,23 @@
 # limitations under the License.


-def generate_content() -> str:
+def generate_content() -> bool:
     # [START googlegenaisdk_textgen_chat_stream_with_txt]
     from google import genai
     from google.genai.types import HttpOptions

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    chat_session = client.chats.create(model="gemini-2.0-flash-001")
-    response_text = ""
+    chat_session = client.chats.create(model="gemini-2.5-flash-preview-05-20")

     for chunk in chat_session.send_message_stream("Why is the sky blue?"):
         print(chunk.text, end="")
-        response_text += chunk.text
     # Example response:
     # The
     # sky appears blue due to a phenomenon called **Rayleigh scattering**. Here's
     # a breakdown of why:
     # ...
     # [END googlegenaisdk_textgen_chat_stream_with_txt]
-    return response_text
+    return True


 if __name__ == "__main__":

genai/text_generation/textgen_chat_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     chat_session = client.chats.create(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         history=[
             UserContent(parts=[Part(text="Hello")]),
             ModelContent(

genai/text_generation/textgen_config_with_txt.py
Lines changed: 3 additions & 2 deletions

@@ -20,9 +20,10 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="Why is the sky blue?",
-        # See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.GenerateContentConfig
+        # See the SDK documentation at
+        # https://googleapis.github.io/python-genai/genai.html#genai.types.GenerateContentConfig
         config=GenerateContentConfig(
             temperature=0,
             candidate_count=1,

genai/text_generation/textgen_sys_instr_with_txt.py
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents="Why is the sky blue?",
         config=GenerateContentConfig(
             system_instruction=[

genai/text_generation/textgen_transcript_with_gcs_audio.py
Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@ def generate_content() -> str:
     Use speaker A, speaker B, etc. to identify speakers.
     """
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=[
             prompt,
             Part.from_uri(

genai/text_generation/textgen_with_gcs_audio.py
Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ def generate_content() -> str:
     Provide a concise summary of the main points in the audio file.
     """
     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=[
             prompt,
             Part.from_uri(

genai/text_generation/textgen_with_local_video.py
Lines changed: 3 additions & 2 deletions

@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

@@ -19,7 +19,7 @@ def generate_content() -> str:
     from google.genai.types import HttpOptions, Part

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.0-flash-001"
+    model_id = "gemini-2.5-flash-preview-05-20"

     # Read local video file content
     with open("test_data/describe_video_content.mp4", "rb") as fp:

@@ -29,6 +29,7 @@ def generate_content() -> str:
     response = client.models.generate_content(
         model=model_id,
         contents=[
+            Part.from_text(text="hello-world"),
             Part.from_bytes(data=video_content, mime_type="video/mp4"),
             "Write a short and engaging blog post based on this video.",
         ],

genai/text_generation/textgen_with_multi_img.py
Lines changed: 1 addition & 1 deletion

@@ -28,7 +28,7 @@ def generate_content() -> str:
         local_file_img_bytes = f.read()

     response = client.models.generate_content(
-        model="gemini-2.0-flash-001",
+        model="gemini-2.5-flash-preview-05-20",
         contents=[
             "Generate a list of all the objects contained in both images.",
             Part.from_uri(file_uri=gcs_file_img_path, mime_type="image/jpeg"),
