samples: Add snippets for sum and avg (#480) · googleapis/python-datastore@4f91b51 · GitHub

Commit 4f91b51

samples: Add snippets for sum and avg (#480)
1 parent 9f3c374 commit 4f91b51

File tree

2 files changed: +254 −0 lines changed


samples/snippets/snippets.py

Lines changed: 159 additions & 0 deletions
@@ -230,6 +230,165 @@ def count_query_with_stale_read(client):
    return [task1, task2, task3]


def sum_query_on_kind(client):
    # [START datastore_sum_aggregation_query_on_kind]
    # Set up sample entities
    # Use incomplete key to auto-generate ID
    task1 = datastore.Entity(client.key("Task"))
    task2 = datastore.Entity(client.key("Task"))
    task3 = datastore.Entity(client.key("Task"))

    task1["hours"] = 5
    task2["hours"] = 3
    task3["hours"] = 1

    tasks = [task1, task2, task3]
    client.put_multi(tasks)

    # Execute sum aggregation query
    all_tasks_query = client.query(kind="Task")
    all_tasks_sum_query = client.aggregation_query(all_tasks_query).sum("hours")
    query_result = all_tasks_sum_query.fetch()
    for aggregation_results in query_result:
        for aggregation in aggregation_results:
            print(f"Total sum of hours in tasks is {aggregation.value}")
    # [END datastore_sum_aggregation_query_on_kind]
    return tasks


def sum_query_property_filter(client):
    # [START datastore_sum_aggregation_query_with_filters]
    # Set up sample entities
    # Use incomplete key to auto-generate ID
    task1 = datastore.Entity(client.key("Task"))
    task2 = datastore.Entity(client.key("Task"))
    task3 = datastore.Entity(client.key("Task"))

    task1["hours"] = 5
    task2["hours"] = 3
    task3["hours"] = 1

    task1["done"] = True
    task2["done"] = True
    task3["done"] = False

    tasks = [task1, task2, task3]
    client.put_multi(tasks)

    # Execute sum aggregation query with filters
    completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
    completed_tasks_query = client.aggregation_query(query=completed_tasks).sum(
        property_ref="hours",
        alias="total_completed_sum_hours"
    )

    completed_query_result = completed_tasks_query.fetch()
    for aggregation_results in completed_query_result:
        for aggregation_result in aggregation_results:
            if aggregation_result.alias == "total_completed_sum_hours":
                print(f"Total sum of hours in completed tasks is {aggregation_result.value}")
    # [END datastore_sum_aggregation_query_with_filters]
    return tasks


def avg_query_on_kind(client):
    # [START datastore_avg_aggregation_query_on_kind]
    # Set up sample entities
    # Use incomplete key to auto-generate ID
    task1 = datastore.Entity(client.key("Task"))
    task2 = datastore.Entity(client.key("Task"))
    task3 = datastore.Entity(client.key("Task"))

    task1["hours"] = 5
    task2["hours"] = 3
    task3["hours"] = 1

    tasks = [task1, task2, task3]
    client.put_multi(tasks)

    # Execute average aggregation query
    all_tasks_query = client.query(kind="Task")
    all_tasks_avg_query = client.aggregation_query(all_tasks_query).avg("hours")
    query_result = all_tasks_avg_query.fetch()
    for aggregation_results in query_result:
        for aggregation in aggregation_results:
            print(f"Total average of hours in tasks is {aggregation.value}")
    # [END datastore_avg_aggregation_query_on_kind]
    return tasks


def avg_query_property_filter(client):
    # [START datastore_avg_aggregation_query_with_filters]
    # Set up sample entities
    # Use incomplete key to auto-generate ID
    task1 = datastore.Entity(client.key("Task"))
    task2 = datastore.Entity(client.key("Task"))
    task3 = datastore.Entity(client.key("Task"))

    task1["hours"] = 5
    task2["hours"] = 3
    task3["hours"] = 1

    task1["done"] = True
    task2["done"] = True
    task3["done"] = False

    tasks = [task1, task2, task3]
    client.put_multi(tasks)

    # Execute average aggregation query with filters
    completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
    completed_tasks_query = client.aggregation_query(query=completed_tasks).avg(
        property_ref="hours",
        alias="total_completed_avg_hours"
    )

    completed_query_result = completed_tasks_query.fetch()
    for aggregation_results in completed_query_result:
        for aggregation_result in aggregation_results:
            if aggregation_result.alias == "total_completed_avg_hours":
                print(f"Total average of hours in completed tasks is {aggregation_result.value}")
    # [END datastore_avg_aggregation_query_with_filters]
    return tasks


def multiple_aggregations_query(client):
    # [START datastore_multiple_aggregation_in_structured_query]
    # Set up sample entities
    # Use incomplete key to auto-generate ID
    task1 = datastore.Entity(client.key("Task"))
    task2 = datastore.Entity(client.key("Task"))
    task3 = datastore.Entity(client.key("Task"))

    task1["hours"] = 5
    task2["hours"] = 3
    task3["hours"] = 1

    tasks = [task1, task2, task3]
    client.put_multi(tasks)

    # Execute query with multiple aggregations
    all_tasks_query = client.query(kind="Task")
    aggregation_query = client.aggregation_query(all_tasks_query)
    # Add aggregations
    aggregation_query.add_aggregations(
        [
            datastore.aggregation.CountAggregation(alias="count_aggregation"),
            datastore.aggregation.SumAggregation(
                property_ref="hours", alias="sum_aggregation"),
            datastore.aggregation.AvgAggregation(
                property_ref="hours", alias="avg_aggregation")
        ]
    )

    query_result = aggregation_query.fetch()
    for aggregation_results in query_result:
        for aggregation in aggregation_results:
            print(f"{aggregation.alias} value is {aggregation.value}")
    # [END datastore_multiple_aggregation_in_structured_query]
    return tasks


def main(project_id):
    client = datastore.Client(project_id)
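
The new snippet functions each take an initialized Datastore client, write their own sample Task entities, run the aggregation server side, and return the entities they created. A minimal driver along the lines of the sketch below could exercise them end to end; the run_aggregation_snippets helper, the GOOGLE_CLOUD_PROJECT environment variable, and the delete_multi cleanup are illustrative assumptions, not part of this commit. Note that the two property-filter snippets need the composite index on Task(done, hours) that the test fixture below creates.

# Illustrative driver for the new snippets (not part of this commit).
# Assumes application-default credentials and GOOGLE_CLOUD_PROJECT are set,
# and that the composite index on Task(done, hours) already exists.
import os

from google.cloud import datastore

import snippets


def run_aggregation_snippets():
    client = datastore.Client(os.environ["GOOGLE_CLOUD_PROJECT"])

    # Each snippet writes its own sample Task entities and returns them
    # so the caller can clean up afterwards.
    created = []
    created += snippets.sum_query_on_kind(client)
    created += snippets.sum_query_property_filter(client)
    created += snippets.avg_query_on_kind(client)
    created += snippets.avg_query_property_filter(client)
    created += snippets.multiple_aggregations_query(client)

    # Remove the sample entities once the aggregation results have printed.
    client.delete_multi([entity.key for entity in created])


if __name__ == "__main__":
    run_aggregation_snippets()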

samples/snippets/snippets_test.py

Lines changed: 95 additions & 0 deletions
@@ -14,7 +14,9 @@
import os

import backoff
import google.api_core.exceptions
from google.cloud import datastore
from google.cloud import datastore_admin_v1
import pytest

import snippets
@@ -43,6 +45,38 @@ def client():
    client.cleanup()


@pytest.fixture(scope="session", autouse=True)
def setup_indexes(request):
    # Set up required indexes
    admin_client = datastore_admin_v1.DatastoreAdminClient()

    indexes = []
    done_property_index = datastore_admin_v1.Index.IndexedProperty(
        name='done',
        direction=datastore_admin_v1.Index.Direction.ASCENDING
    )
    hour_property_index = datastore_admin_v1.Index.IndexedProperty(
        name='hours',
        direction=datastore_admin_v1.Index.Direction.ASCENDING
    )
    done_hour_index = datastore_admin_v1.Index(
        kind='Task',
        ancestor=datastore_admin_v1.Index.AncestorMode.NONE,
        properties=[done_property_index, hour_property_index]
    )
    indexes.append(done_hour_index)

    for index in indexes:
        request = datastore_admin_v1.CreateIndexRequest(project_id=PROJECT, index=index)
        # Create the required index
        # Dependent tests will fail until the index is ready
        try:
            admin_client.create_index(request)
        # Pass if the index already exists
        except google.api_core.exceptions.AlreadyExists:
            pass


@pytest.mark.flaky
class TestDatastoreSnippets:
    # These tests mostly just test the absence of exceptions.
@@ -118,3 +152,64 @@ def test_count_query_with_stale_read(self, capsys, client):
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)

    @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
    def test_sum_query_on_kind(self, capsys, client):
        tasks = snippets.sum_query_on_kind(client)
        captured = capsys.readouterr()
        assert (
            captured.out.strip() == "Total sum of hours in tasks is 9"
        )
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)

    @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
    def test_sum_query_property_filter(self, capsys, client):
        tasks = snippets.sum_query_property_filter(client)
        captured = capsys.readouterr()
        assert (
            captured.out.strip() == "Total sum of hours in completed tasks is 8"
        )
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)

    @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
    def test_avg_query_on_kind(self, capsys, client):
        tasks = snippets.avg_query_on_kind(client)
        captured = capsys.readouterr()
        assert (
            captured.out.strip() == "Total average of hours in tasks is 3.0"
        )
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)

    @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
    def test_avg_query_property_filter(self, capsys, client):
        tasks = snippets.avg_query_property_filter(client)
        captured = capsys.readouterr()
        assert (
            captured.out.strip() == "Total average of hours in completed tasks is 4.0"
        )
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)

    @backoff.on_exception(backoff.expo, AssertionError, max_time=240)
    def test_multiple_aggregations_query(self, capsys, client):
        tasks = snippets.multiple_aggregations_query(client)
        captured = capsys.readouterr()
        assert (
            'avg_aggregation value is 3.0' in captured.out
        )
        assert (
            'count_aggregation value is 3' in captured.out
        )
        assert (
            'sum_aggregation value is 9' in captured.out
        )
        assert captured.err == ""

        client.entities_to_delete.extend(tasks)
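
One thing worth noting about the fixture above: create_index returns as soon as the index build starts, so the aggregation tests rely on backoff-wrapped assertions (max_time=240) to ride out the window where the Task composite index is still building. A hedged alternative, sketched below under the assumption that datastore_admin_v1 exposes list_indexes and the Index.state field, would be to poll until the index reports READY before any dependent test runs.

# Hypothetical helper (not part of this commit): block until the composite
# index on Task(done, hours) finishes building instead of retrying tests.
import time

from google.cloud import datastore_admin_v1


def wait_for_task_indexes(project_id, timeout=240):
    admin_client = datastore_admin_v1.DatastoreAdminClient()
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        request = datastore_admin_v1.ListIndexesRequest(project_id=project_id)
        indexes = list(admin_client.list_indexes(request=request))
        task_indexes = [index for index in indexes if index.kind == "Task"]
        # Done once every index on the Task kind reports READY.
        if task_indexes and all(
            index.state == datastore_admin_v1.Index.State.READY
            for index in task_indexes
        ):
            return
        time.sleep(5)
    raise TimeoutError("Composite index on Task was not ready in time")

The fixture could call a helper like this right after create_index, trading a slower session start-up for tests that no longer need to retry on assertion failures.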
