|
39 | 39 | from google.cloud import bigquery_storage_v1beta1
|
40 | 40 | except ImportError: # pragma: NO COVER
|
41 | 41 | bigquery_storage_v1beta1 = None
|
| 42 | +from google.cloud import bigquery |
42 | 43 | from google.cloud.bigquery import job
|
43 | 44 | from google.cloud.bigquery import table
|
44 | 45 | from google.cloud.bigquery import magics
|
@@ -336,6 +337,38 @@ def test__make_bqstorage_client_true_missing_gapic(missing_grpcio_lib):
|
336 | 337 | assert "grpcio" in str(exc_context.value)
|
337 | 338 |
|
338 | 339 |
|
def test__create_dataset_if_necessary_exists():
    """When ``get_dataset`` succeeds, the dataset already exists and
    ``create_dataset`` must not be called; the helper reports False.
    """
    project = "project_id"
    dataset_id = "dataset_id"
    dataset_reference = bigquery.dataset.DatasetReference(project, dataset_id)
    dataset = bigquery.Dataset(dataset_reference)
    client_patch = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client", autospec=True
    )
    with client_patch as client_mock:
        client_mock().project = project
        # Bug fix: the mock attribute is ``return_value`` — the original
        # ``result_value`` was a typo, so get_dataset never actually
        # returned the prepared dataset and the test passed vacuously.
        client_mock().get_dataset.return_value = dataset
        result = magics._create_dataset_if_necessary(client_mock(), dataset_id)
        client_mock().create_dataset.assert_not_called()

    assert result is False
| 355 | + |
| 356 | + |
def test__create_dataset_if_necessary_not_exist():
    """When ``get_dataset`` raises NotFound, the helper must create the
    dataset exactly once and report True.
    """
    project = "project_id"
    dataset_id = "dataset_id"
    client_patch = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client", autospec=True
    )
    with client_patch as client_mock:
        # mock.patch's replacement returns the same child mock on every
        # call, so configuring via one call and reusing it is equivalent.
        fake_client = client_mock()
        fake_client.location = "us"
        fake_client.project = project
        fake_client.get_dataset.side_effect = exceptions.NotFound("dataset not found")
        result = magics._create_dataset_if_necessary(fake_client, dataset_id)
        fake_client.create_dataset.assert_called_once()
        assert result is True
| 370 | + |
| 371 | + |
339 | 372 | @pytest.mark.usefixtures("ipython_interactive")
|
340 | 373 | def test_extension_load():
|
341 | 374 | ip = IPython.get_ipython()
|
@@ -1199,3 +1232,62 @@ def test_bigquery_magic_omits_tracebacks_from_error_message():
|
1199 | 1232 | assert "400 Syntax error in SQL query" in output
|
1200 | 1233 | assert "Traceback (most recent call last)" not in output
|
1201 | 1234 | assert "Syntax error" not in captured_io.stdout
|
| 1235 | + |
| 1236 | + |
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_destination_table_invalid_format():
    """A ``--destination_table`` value without a ``.`` separator must
    raise ValueError with a message explaining the expected format.
    """
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    mock_credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    patch_default = mock.patch(
        "google.auth.default", return_value=(mock_credentials, "general-project")
    )
    patch_client = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client", autospec=True
    )

    with patch_client, patch_default, pytest.raises(ValueError) as exc_context:
        ip.run_cell_magic(
            "bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar"
        )

    message = str(exc_context.value)
    expected_fragment = (
        "--destination_table should be in a <dataset_id>.<table_id> format."
    )
    assert expected_fragment in message
| 1263 | + |
| 1264 | + |
@pytest.mark.usefixtures("ipython_interactive")
def test_bigquery_magic_w_destination_table():
    """A valid ``--destination_table dataset_id.table_id`` option must
    populate the job config passed to ``_run_query`` with a truncate-write
    destination pointing at that table.
    """
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )

    patch_create_dataset = mock.patch(
        "google.cloud.bigquery.magics._create_dataset_if_necessary", autospec=True
    )
    patch_run_query = mock.patch(
        "google.cloud.bigquery.magics._run_query", autospec=True
    )

    with patch_create_dataset, patch_run_query as run_query_mock:
        ip.run_cell_magic(
            "bigquery",
            "--destination_table dataset_id.table_id",
            "SELECT foo FROM WHERE LIMIT bar",
        )

        # Inspect the keyword arguments of the first _run_query call.
        job_config = run_query_mock.call_args_list[0][1]["job_config"]
        assert job_config.allow_large_results is True
        assert job_config.create_disposition == "CREATE_IF_NEEDED"
        assert job_config.write_disposition == "WRITE_TRUNCATE"
        assert job_config.destination.dataset_id == "dataset_id"
        assert job_config.destination.table_id == "table_id"
0 commit comments