feat(generative-ai): Add more controlled generation examples (#11884) · dtest/python-docs-samples@dcba5b4 · GitHub
[go: up one dir, main page]

Skip to content

Commit dcba5b4

Browse files
authored
feat(generative-ai): Add more controlled generation examples (GoogleCloudPlatform#11884)
1 parent 372eb71 commit dcba5b4

File tree

3 files changed

+324
-4
lines changed

3 files changed

+324
-4
lines changed

generative_ai/controlled_generation/controlled_generation_test.py

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,35 @@
2222

2323

2424
def test_config_response_mime_type() -> None:
    """Smoke test: the response_mime_type sample produces a non-empty response."""
    result = response_mime_type.generate_content(PROJECT_ID)
    assert result
2727

2828

2929
def test_config_response_schema() -> None:
    """Smoke test: the first response_schema sample produces a non-empty response."""
    result = response_schema.generate_content(PROJECT_ID)
    assert result
32+
33+
34+
def test_config_response_schema2() -> None:
    """Smoke test: the nested-array response_schema sample produces a response."""
    result = response_schema.generate_content2(PROJECT_ID)
    assert result
37+
38+
39+
def test_config_response_schema3() -> None:
    """Smoke test: the forecast response_schema sample produces a response."""
    result = response_schema.generate_content3(PROJECT_ID)
    assert result
42+
43+
44+
def test_config_response_schema4() -> None:
    """Smoke test: the enum-based response_schema sample produces a response."""
    result = response_schema.generate_content4(PROJECT_ID)
    assert result
47+
48+
49+
def test_config_response_schema5() -> None:
    """Smoke test: the nullable-fields response_schema sample produces a response."""
    result = response_schema.generate_content5(PROJECT_ID)
    assert result
52+
53+
54+
def test_config_response_schema6() -> None:
    """Smoke test: the playlist response_schema sample produces a response."""
    result = response_schema.generate_content6(PROJECT_ID)
    assert result

generative_ai/controlled_generation/response_mime_type.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515

16-
def config_response_mime_type(project_id: str) -> str:
16+
def generate_content(project_id: str) -> str:
1717
# [START generativeaionvertexai_gemini_controlled_generation_response_mime_type]
1818
import vertexai
1919

generative_ai/controlled_generation/response_schema.py

Lines changed: 296 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515

16-
def config_response_schema(project_id: str) -> str:
16+
def generate_content(project_id: str) -> str:
1717
# [START generativeaionvertexai_gemini_controlled_generation_response_schema]
1818
import vertexai
1919

@@ -49,3 +49,298 @@ def config_response_schema(project_id: str) -> str:
4949
# [END generativeaionvertexai_gemini_controlled_generation_response_schema]
5050

5151
return response.text
52+
53+
54+
def generate_content2(project_id: str) -> str:
    """Demonstrates controlled generation with a nested-array response schema.

    Asks Gemini to extract (rating, flavor) pairs from free-form reviews and
    return them as JSON conforming to the schema below.

    Args:
        project_id: Google Cloud project to run the request in.

    Returns:
        The model's JSON response as text.
    """
    # [START generativeaionvertexai_gemini_controlled_generation_response_schema_2]
    import vertexai

    from vertexai.generative_models import GenerationConfig, GenerativeModel

    # TODO(developer): Update and un-comment below line
    # project_id = "PROJECT_ID"
    vertexai.init(project=project_id, location="us-central1")

    # An array of arrays of {rating, flavor} objects.
    response_schema = {
        "type": "ARRAY",
        "items": {
            "type": "ARRAY",
            "items": {
                "type": "OBJECT",
                "properties": {
                    "rating": {"type": "INTEGER"},
                    "flavor": {"type": "STRING"},
                },
            },
        },
    }

    prompt = """
    Reviews from our social media:

    - "Absolutely loved it! Best ice cream I've ever had." Rating: 4, Flavor: Strawberry Cheesecake
    - "Quite good, but a bit too sweet for my taste." Rating: 1, Flavor: Mango Tango
    """

    model = GenerativeModel("gemini-1.5-pro-001")

    # Forcing JSON output that conforms to response_schema.
    generation_config = GenerationConfig(
        response_mime_type="application/json", response_schema=response_schema
    )
    response = model.generate_content(prompt, generation_config=generation_config)

    print(response.text)
    # [END generativeaionvertexai_gemini_controlled_generation_response_schema_2]

    return response.text
98+
99+
100+
def generate_content3(project_id: str) -> str:
    """Demonstrates controlled generation with an object-wrapped array schema.

    Asks Gemini to turn a prose weather report into a structured JSON forecast
    list matching the schema below.

    Args:
        project_id: Google Cloud project to run the request in.

    Returns:
        The model's JSON response as text.
    """
    # [START generativeaionvertexai_gemini_controlled_generation_response_schema_3]
    import vertexai

    from vertexai.generative_models import GenerationConfig, GenerativeModel

    # TODO(developer): Update and un-comment below line
    # project_id = "PROJECT_ID"
    vertexai.init(project=project_id, location="us-central1")

    # A single object holding a "forecast" array of per-day records.
    response_schema = {
        "type": "OBJECT",
        "properties": {
            "forecast": {
                "type": "ARRAY",
                "items": {
                    "type": "OBJECT",
                    "properties": {
                        "Forecast": {"type": "STRING"},
                        "Humidity": {"type": "STRING"},
                        "Temperature": {"type": "INTEGER"},
                        "Wind Speed": {"type": "INTEGER"},
                    },
                },
            }
        },
    }

    prompt = """
    The week ahead brings a mix of weather conditions.
    Sunday is expected to be sunny with a temperature of 77°F and a humidity level of 50%. Winds will be light at around 10 km/h.
    Monday will see partly cloudy skies with a slightly cooler temperature of 72°F and humidity increasing to 55%. Winds will pick up slightly to around 15 km/h.
    Tuesday brings rain showers, with temperatures dropping to 64°F and humidity rising to 70%. Expect stronger winds at 20 km/h.
    Wednesday may see thunderstorms, with a temperature of 68°F and high humidity of 75%. Winds will be gusty at 25 km/h.
    Thursday will be cloudy with a temperature of 66°F and moderate humidity at 60%. Winds will ease slightly to 18 km/h.
    Friday returns to partly cloudy conditions, with a temperature of 73°F and lower humidity at 45%. Winds will be light at 12 km/h.
    Finally, Saturday rounds off the week with sunny skies, a temperature of 80°F, and a humidity level of 40%. Winds will be gentle at 8 km/h.
    """

    model = GenerativeModel("gemini-1.5-pro-001")

    # Forcing JSON output that conforms to response_schema.
    generation_config = GenerationConfig(
        response_mime_type="application/json", response_schema=response_schema
    )
    response = model.generate_content(prompt, generation_config=generation_config)

    print(response.text)
    # [END generativeaionvertexai_gemini_controlled_generation_response_schema_3]

    return response.text
152+
153+
154+
def generate_content4(project_id: str) -> str:
    """Demonstrates controlled generation with enum-constrained string fields.

    Asks Gemini to classify a donated item into fixed category/condition enums
    and flag resale/discard/safe-handling, as JSON matching the schema below.

    Args:
        project_id: Google Cloud project to run the request in.

    Returns:
        The model's JSON response as text.
    """
    # [START generativeaionvertexai_gemini_controlled_generation_response_schema_4]
    import vertexai

    from vertexai.generative_models import GenerationConfig, GenerativeModel

    # TODO(developer): Update and un-comment below line
    # project_id = "PROJECT_ID"
    vertexai.init(project=project_id, location="us-central1")

    # "enum" restricts the model to one of the listed string values.
    response_schema = {
        "type": "ARRAY",
        "items": {
            "type": "OBJECT",
            "properties": {
                "to_discard": {"type": "INTEGER"},
                "subcategory": {"type": "STRING"},
                "safe_handling": {"type": "INTEGER"},
                "item_category": {
                    "type": "STRING",
                    "enum": [
                        "clothing",
                        "winter apparel",
                        "specialized apparel",
                        "furniture",
                        "decor",
                        "tableware",
                        "cookware",
                        "toys",
                    ],
                },
                "for_resale": {"type": "INTEGER"},
                "condition": {
                    "type": "STRING",
                    "enum": [
                        "new in package",
                        "like new",
                        "gently used",
                        "used",
                        "damaged",
                        "soiled",
                    ],
                },
            },
        },
    }

    prompt = """
    Item description:
    The item is a long winter coat that has many tears all around the seams and is falling apart.
    It has large questionable stains on it.
    """

    model = GenerativeModel("gemini-1.5-pro-001")

    # Forcing JSON output that conforms to response_schema.
    generation_config = GenerationConfig(
        response_mime_type="application/json", response_schema=response_schema
    )
    response = model.generate_content(prompt, generation_config=generation_config)

    print(response.text)
    # [END generativeaionvertexai_gemini_controlled_generation_response_schema_4]

    return response.text
220+
221+
222+
def generate_content5(project_id: str) -> str:
    """Demonstrates controlled generation with nullable schema fields.

    Asks Gemini to extract publication-style metadata from a news snippet;
    fields the text does not mention may be returned as null because each
    property is marked nullable.

    Args:
        project_id: Google Cloud project to run the request in.

    Returns:
        The model's JSON response as text.
    """
    # [START generativeaionvertexai_gemini_controlled_generation_response_schema_5]
    import vertexai

    from vertexai.generative_models import GenerationConfig, GenerativeModel

    # TODO(developer): Update and un-comment below line
    # project_id = "PROJECT_ID"
    vertexai.init(project=project_id, location="us-central1")

    # "nullable" must be a boolean in the OpenAPI-style schema (was the
    # integer 1, which is type-incorrect for the field).
    response_schema = {
        "type": "ARRAY",
        "items": {
            "type": "OBJECT",
            "properties": {
                "Announcement_Date": {"type": "STRING", "nullable": True},
                "Author(s)": {
                    "type": "ARRAY",
                    "nullable": True,
                    "items": {"type": "STRING"},
                },
                "Journal_Ref": {"type": "STRING", "nullable": True},
                "Keyword(s)": {
                    "type": "ARRAY",
                    "nullable": True,
                    "items": {"type": "STRING"},
                },
                "Subject(s)": {
                    "type": "ARRAY",
                    "nullable": True,
                    "items": {"type": "STRING"},
                },
                "Submission_Date": {"type": "STRING", "nullable": True},
                "Title": {"type": "STRING", "nullable": True},
                "Version": {
                    "type": "STRING",
                    "nullable": True,
                    "enum": [
                        "Dungeons & Dragons",
                        "Duel Masters",
                        "G.I. Joe",
                        "Jem and The Holograms",
                        "Littlest Pet Shop",
                        "Magic: The Gathering",
                        "Monopoly",
                        "My Little Pony",
                        "Nerf",
                    ],
                },
            },
        },
    }

    prompt = """
    Hasbro stock slid 5.2% following a double-downgrade to “underperform” from “buy” at Bank of America.
    BofA conducted a “deep dive” on trading card game. BofA said Hasbro has been overprinting cards and
    destroying the long-term value of the business.
    """

    model = GenerativeModel("gemini-1.5-pro-001")

    # Forcing JSON output that conforms to response_schema.
    response = model.generate_content(
        prompt,
        generation_config=GenerationConfig(
            response_mime_type="application/json", response_schema=response_schema
        ),
    )

    print(response.text)
    # [END generativeaionvertexai_gemini_controlled_generation_response_schema_5]

    return response.text
294+
295+
296+
def generate_content6(project_id: str) -> str:
    """Demonstrates controlled generation with a mixed object schema.

    Asks Gemini to build a song playlist plus a start time from a party
    request, as JSON matching the schema below.

    Args:
        project_id: Google Cloud project to run the request in.

    Returns:
        The model's JSON response as text.
    """
    # [START generativeaionvertexai_gemini_controlled_generation_response_schema_6]
    import vertexai

    from vertexai.generative_models import GenerationConfig, GenerativeModel

    # TODO(developer): Update and un-comment below line
    # project_id = "PROJECT_ID"
    vertexai.init(project=project_id, location="us-central1")

    # An object combining an array property with a scalar property.
    response_schema = {
        "type": "OBJECT",
        "properties": {
            "playlist": {
                "type": "ARRAY",
                "items": {
                    "type": "OBJECT",
                    "properties": {
                        "artist": {"type": "STRING"},
                        "song": {"type": "STRING"},
                        "era": {"type": "STRING"},
                        "released": {"type": "INTEGER"},
                    },
                },
            },
            "time_start": {"type": "STRING"},
        },
    }

    prompt = """
    We have two friends of the host who have requested a few songs for us to play. We're going to start this playlist at 8:15.
    They'll want to hear Black Hole Sun by Soundgarden because their son was born in 1994. They will also want Loser by Beck
    coming right after which is a funny choice considering it's also the same year as their son was born, but that's probably
    just a coincidence. Add Take On Me from A-ha to the list since they were married when the song released in 1985. Their final
    request is Sweet Child O' Mine by Guns N Roses, which I think came out in 1987 when they both finished university.
    Thank you, this party should be great!
    """

    model = GenerativeModel("gemini-1.5-pro-001")

    # Forcing JSON output that conforms to response_schema.
    generation_config = GenerationConfig(
        response_mime_type="application/json", response_schema=response_schema
    )
    response = model.generate_content(prompt, generation_config=generation_config)

    print(response.text)
    # [END generativeaionvertexai_gemini_controlled_generation_response_schema_6]

    return response.text

0 commit comments

Comments
 (0)
0