bigquery: add client.list_dataset_tables (#4013) · googleapis/google-cloud-python@8ec4c21 · GitHub

Commit 8ec4c21

jba authored and tswast committed
bigquery: add client.list_dataset_tables (#4013)
Remove Dataset.list_tables
1 parent 6bdfb60 commit 8ec4c21

File tree

5 files changed: +195 -187 lines changed

  • bigquery
    • google/cloud/bigquery
  • tests

    bigquery/google/cloud/bigquery/client.py

    Lines changed: 54 additions & 1 deletion
    @@ -23,7 +23,7 @@
     from google.cloud.bigquery._http import Connection
     from google.cloud.bigquery.dataset import Dataset
     from google.cloud.bigquery.dataset import DatasetReference
    -from google.cloud.bigquery.dataset import Table
    +from google.cloud.bigquery.table import Table
     from google.cloud.bigquery.job import CopyJob
     from google.cloud.bigquery.job import ExtractJob
     from google.cloud.bigquery.job import LoadJob
    @@ -264,6 +264,44 @@ def update_dataset(self, dataset, fields):
                 method='PATCH', path=path, data=partial, headers=headers)
             return Dataset.from_api_repr(api_response, self)
     
    +    def list_dataset_tables(self, dataset, max_results=None, page_token=None):
    +        """List tables in the dataset.
    +
    +        See
    +        https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list
    +
    +        :type dataset: One of:
    +                       :class:`~google.cloud.bigquery.dataset.Dataset`
    +                       :class:`~google.cloud.bigquery.dataset.DatasetReference`
    +        :param dataset: the dataset whose tables to list, or a reference to it.
    +
    +        :type max_results: int
    +        :param max_results: (Optional) Maximum number of tables to return.
    +                            If not passed, defaults to a value set by the API.
    +
    +        :type page_token: str
    +        :param page_token: (Optional) Opaque marker for the next "page" of
    +                           datasets. If not passed, the API will return the
    +                           first page of datasets.
    +
    +        :rtype: :class:`~google.api.core.page_iterator.Iterator`
    +        :returns: Iterator of :class:`~google.cloud.bigquery.table.Table`
    +                  contained within the current dataset.
    +        """
    +        if not isinstance(dataset, (Dataset, DatasetReference)):
    +            raise TypeError('dataset must be a Dataset or a DatasetReference')
    +        path = '%s/tables' % dataset.path
    +        result = page_iterator.HTTPIterator(
    +            client=self,
    +            api_request=self._connection.api_request,
    +            path=path,
    +            item_to_value=_item_to_table,
    +            items_key='tables',
    +            page_token=page_token,
    +            max_results=max_results)
    +        result.dataset = dataset
    +        return result
    +
         def delete_dataset(self, dataset):
             """Delete a dataset.
     
    @@ -600,3 +638,18 @@ def _item_to_job(iterator, resource):
         :returns: The next job in the page.
         """
         return iterator.client.job_from_resource(resource)
    +
    +
    +def _item_to_table(iterator, resource):
    +    """Convert a JSON table to the native object.
    +
    +    :type iterator: :class:`~google.api.core.page_iterator.Iterator`
    +    :param iterator: The iterator that is currently in use.
    +
    +    :type resource: dict
    +    :param resource: An item to be converted to a table.
    +
    +    :rtype: :class:`~google.cloud.bigquery.table.Table`
    +    :returns: The next table in the page.
    +    """
    +    return Table.from_api_repr(resource, iterator.client)
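
    For orientation, a minimal usage sketch of the method this diff adds. The sketch is not part of the commit: the dataset name is hypothetical and default application credentials are assumed.

        # Hypothetical usage of the new Client.list_dataset_tables.
        from google.cloud import bigquery

        client = bigquery.Client()
        dataset_ref = client.dataset('my_dataset')  # a DatasetReference; a Dataset also works

        # The returned HTTPIterator yields Table objects and follows
        # nextPageToken automatically; max_results and page_token are optional.
        for table in client.list_dataset_tables(dataset_ref, max_results=10):
            print(table.full_table_id)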

    bigquery/google/cloud/bigquery/dataset.py

    Lines changed: 0 additions & 49 deletions
    @@ -18,9 +18,7 @@
     
     import six
     
    -from google.api.core import page_iterator
     from google.cloud._helpers import _datetime_from_microseconds
    -from google.cloud.bigquery.table import Table
     from google.cloud.bigquery.table import TableReference
     
     
    @@ -476,38 +474,6 @@ def _build_resource(self):
     
             return resource
     
    -    def list_tables(self, max_results=None, page_token=None):
    -        """List tables for the project associated with this client.
    -
    -        See
    -        https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list
    -
    -        :type max_results: int
    -        :param max_results: (Optional) Maximum number of tables to return.
    -                            If not passed, defaults to a value set by the API.
    -
    -        :type page_token: str
    -        :param page_token: (Optional) Opaque marker for the next "page" of
    -                           datasets. If not passed, the API will return the
    -                           first page of datasets.
    -
    -        :rtype: :class:`~google.api.core.page_iterator.Iterator`
    -        :returns: Iterator of :class:`~google.cloud.bigquery.table.Table`
    -                  contained within the current dataset.
    -        """
    -        path = '/projects/%s/datasets/%s/tables' % (
    -            self.project, self.dataset_id)
    -        result = page_iterator.HTTPIterator(
    -            client=self._client,
    -            api_request=self._client._connection.api_request,
    -            path=path,
    -            item_to_value=_item_to_table,
    -            items_key='tables',
    -            page_token=page_token,
    -            max_results=max_results)
    -        result.dataset = self
    -        return result
    -
         def table(self, table_id):
             """Constructs a TableReference.
     
    @@ -518,18 +484,3 @@ def table(self, table_id):
         :returns: a TableReference for a table in this dataset.
         """
         return TableReference(self, table_id)
    -
    -
    -def _item_to_table(iterator, resource):
    -    """Convert a JSON table to the native object.
    -
    -    :type iterator: :class:`~google.api.core.page_iterator.Iterator`
    -    :param iterator: The iterator that is currently in use.
    -
    -    :type resource: dict
    -    :param resource: An item to be converted to a table.
    -
    -    :rtype: :class:`~google.cloud.bigquery.table.Table`
    -    :returns: The next table in the page.
    -    """
    -    return Table.from_api_repr(resource, iterator.dataset)
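
    Because this diff removes Dataset.list_tables outright rather than deprecating it, existing callers need a small migration. A sketch of the change, using hypothetical client and dataset objects:

        # Before this commit: listing hung off the Dataset object.
        tables = list(dataset.list_tables())

        # After this commit: the Client owns the HTTP call, and the Dataset
        # (or a DatasetReference) is passed in explicitly.
        tables = list(client.list_dataset_tables(dataset))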

    bigquery/tests/system.py

    Lines changed: 3 additions & 3 deletions
    @@ -202,13 +202,13 @@ def test_get_table_w_public_dataset(self):
             self.assertEqual(
                 schema_names, ['word', 'word_count', 'corpus', 'corpus_date'])
     
    -    def test_list_tables(self):
    +    def test_list_dataset_tables(self):
             DATASET_ID = _make_dataset_id('list_tables')
             dataset = retry_403(Config.CLIENT.create_dataset)(Dataset(DATASET_ID))
             self.to_delete.append(dataset)
     
             # Retrieve tables before any are created for the dataset.
    -        iterator = dataset.list_tables()
    +        iterator = Config.CLIENT.list_dataset_tables(dataset)
             all_tables = list(iterator)
             self.assertEqual(all_tables, [])
             self.assertIsNone(iterator.next_page_token)
    @@ -230,7 +230,7 @@ def test_list_tables(self):
             self.to_delete.insert(0, created_table)
     
             # Retrieve the tables.
    -        iterator = dataset.list_tables()
    +        iterator = Config.CLIENT.list_dataset_tables(dataset)
             all_tables = list(iterator)
             self.assertIsNone(iterator.next_page_token)
             created = [table for table in all_tables

    bigquery/tests/unit/test_client.py

    Lines changed: 136 additions & 0 deletions
    @@ -480,6 +480,142 @@ def test_update_dataset(self):
             req = conn._requested[1]
             self.assertEqual(req['headers']['If-Match'], 'etag')
     
    +    def test_list_dataset_tables_empty(self):
    +        import six
    +
    +        PROJECT = 'PROJECT'
    +        DS_ID = 'DATASET_ID'
    +        creds = _make_credentials()
    +        client = self._make_one(project=PROJECT, credentials=creds)
    +        conn = client._connection = _Connection({})
    +
    +        dataset = client.dataset(DS_ID)
    +        iterator = client.list_dataset_tables(dataset)
    +        self.assertIs(iterator.dataset, dataset)
    +        page = six.next(iterator.pages)
    +        tables = list(page)
    +        token = iterator.next_page_token
    +
    +        self.assertEqual(tables, [])
    +        self.assertIsNone(token)
    +        self.assertEqual(len(conn._requested), 1)
    +        req = conn._requested[0]
    +        self.assertEqual(req['method'], 'GET')
    +        PATH = 'projects/%s/datasets/%s/tables' % (PROJECT, DS_ID)
    +        self.assertEqual(req['path'], '/%s' % PATH)
    +
    +    def test_list_dataset_tables_defaults(self):
    +        import six
    +        from google.cloud.bigquery.table import Table
    +
    +        PROJECT = 'PROJECT'
    +        DS_ID = 'DATASET_ID'
    +        TABLE_1 = 'table_one'
    +        TABLE_2 = 'table_two'
    +        PATH = 'projects/%s/datasets/%s/tables' % (PROJECT, DS_ID)
    +        TOKEN = 'TOKEN'
    +        DATA = {
    +            'nextPageToken': TOKEN,
    +            'tables': [
    +                {'kind': 'bigquery#table',
    +                 'id': '%s:%s.%s' % (PROJECT, DS_ID, TABLE_1),
    +                 'tableReference': {'tableId': TABLE_1,
    +                                    'datasetId': DS_ID,
    +                                    'projectId': PROJECT},
    +                 'type': 'TABLE'},
    +                {'kind': 'bigquery#table',
    +                 'id': '%s:%s.%s' % (PROJECT, DS_ID, TABLE_2),
    +                 'tableReference': {'tableId': TABLE_2,
    +                                    'datasetId': DS_ID,
    +                                    'projectId': PROJECT},
    +                 'type': 'TABLE'},
    +            ]
    +        }
    +
    +        creds = _make_credentials()
    +        client = self._make_one(project=PROJECT, credentials=creds)
    +        conn = client._connection = _Connection(DATA)
    +        dataset = client.dataset(DS_ID)
    +
    +        iterator = client.list_dataset_tables(dataset)
    +        self.assertIs(iterator.dataset, dataset)
    +        page = six.next(iterator.pages)
    +        tables = list(page)
    +        token = iterator.next_page_token
    +
    +        self.assertEqual(len(tables), len(DATA['tables']))
    +        for found, expected in zip(tables, DATA['tables']):
    +            self.assertIsInstance(found, Table)
    +            self.assertEqual(found.full_table_id, expected['id'])
    +            self.assertEqual(found.table_type, expected['type'])
    +        self.assertEqual(token, TOKEN)
    +
    +        self.assertEqual(len(conn._requested), 1)
    +        req = conn._requested[0]
    +        self.assertEqual(req['method'], 'GET')
    +        self.assertEqual(req['path'], '/%s' % PATH)
    +
    +    def test_list_dataset_tables_explicit(self):
    +        import six
    +        from google.cloud.bigquery.table import Table
    +
    +        PROJECT = 'PROJECT'
    +        DS_ID = 'DATASET_ID'
    +        TABLE_1 = 'table_one'
    +        TABLE_2 = 'table_two'
    +        PATH = 'projects/%s/datasets/%s/tables' % (PROJECT, DS_ID)
    +        TOKEN = 'TOKEN'
    +        DATA = {
    +            'tables': [
    +                {'kind': 'bigquery#dataset',
    +                 'id': '%s:%s.%s' % (PROJECT, DS_ID, TABLE_1),
    +                 'tableReference': {'tableId': TABLE_1,
    +                                    'datasetId': DS_ID,
    +                                    'projectId': PROJECT},
    +                 'type': 'TABLE'},
    +                {'kind': 'bigquery#dataset',
    +                 'id': '%s:%s.%s' % (PROJECT, DS_ID, TABLE_2),
    +                 'tableReference': {'tableId': TABLE_2,
    +                                    'datasetId': DS_ID,
    +                                    'projectId': PROJECT},
    +                 'type': 'TABLE'},
    +            ]
    +        }
    +
    +        creds = _make_credentials()
    +        client = self._make_one(project=PROJECT, credentials=creds)
    +        conn = client._connection = _Connection(DATA)
    +        dataset = client.dataset(DS_ID)
    +
    +        iterator = client.list_dataset_tables(
    +            dataset, max_results=3, page_token=TOKEN)
    +        self.assertIs(iterator.dataset, dataset)
    +        page = six.next(iterator.pages)
    +        tables = list(page)
    +        token = iterator.next_page_token
    +
    +        self.assertEqual(len(tables), len(DATA['tables']))
    +        for found, expected in zip(tables, DATA['tables']):
    +            self.assertIsInstance(found, Table)
    +            self.assertEqual(found.full_table_id, expected['id'])
    +            self.assertEqual(found.table_type, expected['type'])
    +        self.assertIsNone(token)
    +
    +        self.assertEqual(len(conn._requested), 1)
    +        req = conn._requested[0]
    +        self.assertEqual(req['method'], 'GET')
    +        self.assertEqual(req['path'], '/%s' % PATH)
    +        self.assertEqual(req['query_params'],
    +                         {'maxResults': 3, 'pageToken': TOKEN})
    +
    +    def test_list_dataset_tables_wrong_type(self):
    +        PROJECT = 'PROJECT'
    +        DS_ID = 'DATASET_ID'
    +        creds = _make_credentials()
    +        client = self._make_one(project=PROJECT, credentials=creds)
    +        with self.assertRaises(TypeError):
    +            client.list_dataset_tables(client.dataset(DS_ID).table("foo"))
    +
         def test_delete_dataset(self):
             from google.cloud.bigquery.dataset import Dataset
     
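
    These unit tests lean on a _Connection test double defined elsewhere in test_client.py: it replays canned JSON responses and records every api_request call so the assertions can inspect method, path, and query_params. A rough sketch of that pattern, as an illustration only and not the repository's actual helper:

        class _ConnectionStub(object):
            """Illustrative fake connection: returns canned JSON responses and
            records each request, matching how the assertions above inspect
            conn._requested."""

            def __init__(self, *responses):
                self._responses = list(responses)
                self._requested = []

            def api_request(self, **kwargs):
                # Record the request keyword arguments (method, path, query_params, ...)
                # and hand back the next canned response, or an empty payload.
                self._requested.append(kwargs)
                return self._responses.pop(0) if self._responses else {}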
