 import argparse
 from datetime import datetime, timedelta, timezone
 from pprint import pprint
+import time
 
 from google.cloud import datastore  # noqa: I100
 
@@ -63,7 +64,7 @@ def query_with_readtime(client):
     read_time = datetime.now(timezone.utc) - timedelta(seconds=15)
 
     # Fetch an entity with read_time
-    task_key = client.key('Task', 'sampletask')
+    task_key = client.key("Task", "sampletask")
     entity = client.get(task_key, read_time=read_time)
 
     # Query Task entities with read_time
@@ -77,11 +78,170 @@ def query_with_readtime(client):
     return results
 
 
+def count_query_in_transaction(client):
+    # [START datastore_count_in_transaction]
+    task1 = datastore.Entity(client.key("Task", "task1"))
+    task2 = datastore.Entity(client.key("Task", "task2"))
+
+    task1["owner"] = "john"
+    task2["owner"] = "john"
+
+    tasks = [task1, task2]
+    client.put_multi(tasks)
+
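+    # Running the aggregation inside the transaction below keeps the count and
+    # the conditional write on a single, consistent view of the data.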
+    with client.transaction() as transaction:
+
+        tasks_of_john = client.query(kind="Task")
+        tasks_of_john.add_filter("owner", "=", "john")
+        total_tasks_query = client.aggregation_query(tasks_of_john)
+
+        query_result = total_tasks_query.count(alias="tasks_count").fetch()
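+        # fetch() yields batches of aggregation results; each batch is a list of
+        # result objects exposing .alias and .value, hence the [0] below.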
+        for task_result in query_result:
+            tasks_count = task_result[0]
+            if tasks_count.value < 2:
+                task3 = datastore.Entity(client.key("Task", "task3"))
+                task3["owner"] = "john"
+                transaction.put(task3)
+                tasks.append(task3)
+            else:
+                print(f"Found existing {tasks_count.value} tasks, rolling back")
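+                # NOTE: entities_to_delete is assumed to be a cleanup list attached
+                # to the client by the test harness, not a standard
+                # datastore.Client attribute.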
+                client.entities_to_delete.extend(tasks)
+                raise ValueError("User 'John' cannot have more than 2 tasks")
+    # [END datastore_count_in_transaction]
+
+
+def count_query_on_kind(client):
+    # [START datastore_count_on_kind]
+    task1 = datastore.Entity(client.key("Task", "task1"))
+    task2 = datastore.Entity(client.key("Task", "task2"))
+
+    tasks = [task1, task2]
+    client.put_multi(tasks)
+    all_tasks_query = client.query(kind="Task")
+    all_tasks_count_query = client.aggregation_query(all_tasks_query).count()
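+    # count() is called without an explicit alias here, so the result is reported
+    # under a default alias (see the print statement below).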
+    query_result = all_tasks_count_query.fetch()
+    for aggregation_results in query_result:
+        for aggregation in aggregation_results:
+            print(f"Total tasks (accessible from default alias) is {aggregation.value}")
+    # [END datastore_count_on_kind]
+    return tasks
+
+
+def count_query_with_limit(client):
+    # [START datastore_count_with_limit]
+    task1 = datastore.Entity(client.key("Task", "task1"))
+    task2 = datastore.Entity(client.key("Task", "task2"))
+    task3 = datastore.Entity(client.key("Task", "task3"))
+
+    tasks = [task1, task2, task3]
+    client.put_multi(tasks)
+    all_tasks_query = client.query(kind="Task")
+    all_tasks_count_query = client.aggregation_query(all_tasks_query).count()
+    query_result = all_tasks_count_query.fetch(limit=2)
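+    # With limit=2 the count stops once two matching entities have been seen, so
+    # the reported value is capped at 2 even though three tasks exist.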
+    for aggregation_results in query_result:
+        for aggregation in aggregation_results:
+            print(f"We have at least {aggregation.value} tasks")
+    # [END datastore_count_with_limit]
+    return tasks
+
+
+def count_query_property_filter(client):
+    # [START datastore_count_with_property_filter]
+    task1 = datastore.Entity(client.key("Task", "task1"))
+    task2 = datastore.Entity(client.key("Task", "task2"))
+    task3 = datastore.Entity(client.key("Task", "task3"))
+
+    task1["done"] = True
+    task2["done"] = False
+    task3["done"] = True
+
+    tasks = [task1, task2, task3]
+    client.put_multi(tasks)
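+    # add_filter() returns the query itself, so each filtered query is built in a
+    # single chained expression; one aggregation is then run per filter, each
+    # under its own alias.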
+    completed_tasks = client.query(kind="Task").add_filter("done", "=", True)
+    remaining_tasks = client.query(kind="Task").add_filter("done", "=", False)
+
+    completed_tasks_query = client.aggregation_query(query=completed_tasks).count(
+        alias="total_completed_count"
+    )
+    remaining_tasks_query = client.aggregation_query(query=remaining_tasks).count(
+        alias="total_remaining_count"
+    )
+
+    completed_query_result = completed_tasks_query.fetch()
+    for aggregation_results in completed_query_result:
+        for aggregation_result in aggregation_results:
+            if aggregation_result.alias == "total_completed_count":
+                print(f"Total completed tasks count is {aggregation_result.value}")
+
+    remaining_query_result = remaining_tasks_query.fetch()
+    for aggregation_results in remaining_query_result:
+        for aggregation_result in aggregation_results:
+            if aggregation_result.alias == "total_remaining_count":
+                print(f"Total remaining tasks count is {aggregation_result.value}")
+    # [END datastore_count_with_property_filter]
+    return tasks
+
+
+def count_query_with_stale_read(client):
+
+    tasks = [task for task in client.query(kind="Task").fetch()]
+    client.delete_multi(tasks)  # ensure the database is empty before starting
+
+    # [START datastore_count_query_with_stale_read]
+    task1 = datastore.Entity(client.key("Task", "task1"))
+    task2 = datastore.Entity(client.key("Task", "task2"))
+
+    # Saving two tasks
+    task1["done"] = True
+    task2["done"] = False
+    client.put_multi([task1, task2])
+    time.sleep(10)
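+    # The sleeps are presumably here to leave a clear gap between the write
+    # timestamps and past_timestamp, so the stale read below reflects only the
+    # first two tasks.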
+
+    past_timestamp = datetime.now(
+        timezone.utc
+    )  # we have two tasks in the database at this time.
+    time.sleep(10)
+
+    # Saving third task
+    task3 = datastore.Entity(client.key("Task", "task3"))
+    task3["done"] = False
+    client.put(task3)
+
+    all_tasks = client.query(kind="Task")
+    all_tasks_count = client.aggregation_query(
+        query=all_tasks,
+    ).count(alias="all_tasks_count")
+
+    # Executing aggregation query
+    query_result = all_tasks_count.fetch()
+    for aggregation_results in query_result:
+        for aggregation_result in aggregation_results:
+            print(f"Latest tasks count is {aggregation_result.value}")
+
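+    # Passing read_time runs the aggregation against the database as it existed
+    # at past_timestamp, when only task1 and task2 had been written.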
+    # Executing aggregation query with past timestamp
+    tasks_in_past = client.aggregation_query(query=all_tasks).count(
+        alias="tasks_in_past"
+    )
+    tasks_in_the_past_query_result = tasks_in_past.fetch(read_time=past_timestamp)
+    for aggregation_results in tasks_in_the_past_query_result:
+        for aggregation_result in aggregation_results:
+            print(f"Stale tasks count is {aggregation_result.value}")
+    # [END datastore_count_query_with_stale_read]
+    return [task1, task2, task3]
+
+
 def main(project_id):
     client = datastore.Client(project_id)
 
     for name, function in globals().items():
-        if name in ("main", "_preamble", "defaultdict", "datetime", "timezone", "timedelta") or not callable(function):
+        if name in (
+            "main",
+            "_preamble",
+            "defaultdict",
+            "datetime",
+            "timezone",
+            "timedelta",
+        ) or not callable(function):
             continue
 
         print(name)