@@ -63,6 +63,30 @@ def query_standard_sql(query):
        print(row)


+def query_destination_table(query, dest_dataset_id, dest_table_id):
+    client = bigquery.Client()
+    query_job = client.run_async_query(str(uuid.uuid4()), query)
+
+    # Allow for query results larger than the maximum response size.
+    query_job.allow_large_results = True
+
+    # When large results are allowed, a destination table must be set.
+    dest_dataset = client.dataset(dest_dataset_id)
+    dest_table = dest_dataset.table(dest_table_id)
+    query_job.destination = dest_table
+
+    # Allow the results table to be overwritten.
+    query_job.write_disposition = 'WRITE_TRUNCATE'
+
+    query_job.begin()
+    query_job.result()  # Wait for job to complete.
+
+    # Verify that the results were written to the destination table.
+    dest_table.reload()  # Get the table metadata, such as the schema.
+    for row in dest_table.fetch_data():
+        print(row)
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
@@ -72,10 +96,19 @@ def query_standard_sql(query):
        '--use_standard_sql',
        action='store_true',
        help='Use standard SQL syntax.')
+    parser.add_argument(
+        '--destination_table',
+        type=str,
+        help=(
+            'Destination table to use for results. '
+            'Example: my_dataset.my_table'))

    args = parser.parse_args()

    if args.use_standard_sql:
        query_standard_sql(args.query)
+    elif args.destination_table:
+        dataset, table = args.destination_table.split('.')
+        query_destination_table(args.query, dataset, table)
    else:
        query(args.query)
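
For context, a minimal sketch of exercising the new helper outside the CLI path added above. The query string and the dataset/table IDs below are placeholders, and it assumes query_destination_table() is importable from this sample module along with the older google.cloud.bigquery client API the sample relies on:

    # Usage sketch (placeholder values only; not part of the diff).
    query_destination_table(
        query='SELECT 17',              # placeholder query
        dest_dataset_id='my_dataset',   # placeholder destination dataset ID
        dest_table_id='my_table')       # placeholder destination table ID

The same path is reachable from the command line through the new flag, e.g. --destination_table my_dataset.my_table.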