Skip to content

Commit

Permalink
BigQuery: Updates Table constructor to use TableReference as parameter (
Browse files Browse the repository at this point in the history
#3997)

* wip update Table constructor

* BigQuery: Updates Table constructor to use TableReference as parameter

* fixes circular import error with Python 2.7
  • Loading branch information
alixhami committed Sep 19, 2017
1 parent 8847511 commit bb4bc6c
Show file tree
Hide file tree
Showing 6 changed files with 282 additions and 207 deletions.
2 changes: 2 additions & 0 deletions bigquery/google/cloud/bigquery/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@

"""Client for interacting with the Google BigQuery API."""

from __future__ import absolute_import

from google.api.core import page_iterator
from google.cloud.client import ClientWithProject
from google.cloud.bigquery._http import Connection
Expand Down
6 changes: 5 additions & 1 deletion bigquery/google/cloud/bigquery/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@
# limitations under the License.

"""Define API Datasets."""

from __future__ import absolute_import

import six

from google.api.core import page_iterator
Expand Down Expand Up @@ -598,7 +601,8 @@ def table(self, name, schema=()):
:rtype: :class:`google.cloud.bigquery.table.Table`
:returns: a new ``Table`` instance
"""
return Table(name, dataset=self, schema=schema)
table_ref = TableReference(self, name)
return Table(table_ref, schema=schema, client=self._client)


def _item_to_table(iterator, resource):
Expand Down
13 changes: 9 additions & 4 deletions bigquery/google/cloud/bigquery/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import Table
from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.table import _build_schema_resource
from google.cloud.bigquery.table import _parse_schema_resource
from google.cloud.bigquery._helpers import ArrayQueryParameter
Expand Down Expand Up @@ -837,7 +838,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
destination = Table(dest_config['tableId'], dataset)
table_ref = TableReference(dataset, dest_config['tableId'])
destination = Table(table_ref, client=client)
source_urls = config.get('sourceUris', ())
job = cls(job_id, destination, source_urls, client=client)
job._set_properties(resource)
Expand Down Expand Up @@ -952,7 +954,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
destination = Table(dest_config['tableId'], dataset)
table_ref = TableReference(dataset, dest_config['tableId'])
destination = Table(table_ref, client=client)
sources = []
source_configs = config.get('sourceTables')
if source_configs is None:
Expand All @@ -963,7 +966,8 @@ def from_api_repr(cls, resource, client):
source_configs = [single]
for source_config in source_configs:
dataset = Dataset(source_config['datasetId'], client)
sources.append(Table(source_config['tableId'], dataset))
table_ref = TableReference(dataset, source_config['tableId'])
sources.append(Table(table_ref, client=client))
job = cls(job_id, destination, sources, client=client)
job._set_properties(resource)
return job
Expand Down Expand Up @@ -1109,7 +1113,8 @@ def from_api_repr(cls, resource, client):
job_id, config = cls._get_resource_config(resource)
source_config = config['sourceTable']
dataset = Dataset(source_config['datasetId'], client)
source = Table(source_config['tableId'], dataset)
table_ref = TableReference(dataset, source_config['tableId'])
source = Table(table_ref, client=client)
destination_uris = config['destinationUris']
job = cls(job_id, source, destination_uris, client=client)
job._set_properties(resource)
Expand Down
28 changes: 17 additions & 11 deletions bigquery/google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@

"""Define API Datasets."""

from __future__ import absolute_import

import datetime
import os

Expand Down Expand Up @@ -90,24 +92,22 @@ class Table(object):
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/tables
:type table_id: str
:param table_id: the ID of the table
:type dataset: :class:`google.cloud.bigquery.dataset.Dataset`
:param dataset: The dataset which contains the table.
:type table_ref: :class:`google.cloud.bigquery.table.TableReference`
:param table_ref: a pointer to a table
:type schema: list of :class:`SchemaField`
:param schema: The table's schema
"""

_schema = None

def __init__(self, table_id, dataset, schema=()):
self._table_id = table_id
self._dataset = dataset
def __init__(self, table_ref, schema=(), client=None):
self._table_id = table_ref.table_id
self._dataset = table_ref.dataset
self._properties = {}
# Let the @property do validation.
self.schema = schema
self._client = client

@property
def project(self):
Expand Down Expand Up @@ -477,7 +477,7 @@ def list_partitions(self, client=None):
return [row[0] for row in query.rows]

@classmethod
def from_api_repr(cls, resource, dataset):
def from_api_repr(cls, resource, client):
"""Factory: construct a table given its API representation
:type resource: dict
Expand All @@ -489,12 +489,18 @@ def from_api_repr(cls, resource, dataset):
:rtype: :class:`google.cloud.bigquery.table.Table`
:returns: Table parsed from ``resource``.
"""
from google.cloud.bigquery import dataset

if ('tableReference' not in resource or
'tableId' not in resource['tableReference']):
raise KeyError('Resource lacks required identity information:'
'["tableReference"]["tableId"]')
project_id = resource['tableReference']['projectId']
table_id = resource['tableReference']['tableId']
table = cls(table_id, dataset=dataset)
dataset_id = resource['tableReference']['datasetId']
dataset_ref = dataset.DatasetReference(project_id, dataset_id)

table = cls(dataset_ref.table(table_id), client=client)
table._set_properties(resource)
return table

Expand All @@ -510,7 +516,7 @@ def _require_client(self, client):
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._dataset._client
client = self._client
return client

def _set_properties(self, api_response):
Expand Down
14 changes: 9 additions & 5 deletions bigquery/tests/unit/test_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -2169,6 +2169,7 @@ def test_begin_w_bound_client(self):

def test_begin_w_alternate_client(self):
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.dataset import Table

PATH = '/projects/%s/jobs' % (self.PROJECT,)
Expand Down Expand Up @@ -2203,8 +2204,10 @@ def test_begin_w_alternate_client(self):
client2 = _Client(project=self.PROJECT, connection=conn2)
job = self._make_one(self.JOB_NAME, self.QUERY, client1)

dataset_ref = DatasetReference(self.PROJECT, DS_ID)
dataset = Dataset(DS_ID, client1)
table = Table(TABLE, dataset)
table_ref = dataset_ref.table(TABLE)
table = Table(table_ref, client=client1)

job.allow_large_results = True
job.create_disposition = 'CREATE_NEVER'
Expand Down Expand Up @@ -2460,8 +2463,8 @@ def test_exists_hit_w_alternate_client(self):
self.assertEqual(req['query_params'], {'fields': 'id'})

def test_reload_w_bound_client(self):
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.dataset import Table
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.table import Table

PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)
DS_ID = 'DATASET'
Expand All @@ -2471,8 +2474,9 @@ def test_reload_w_bound_client(self):
client = _Client(project=self.PROJECT, connection=conn)
job = self._make_one(self.JOB_NAME, None, client)

dataset = Dataset(DS_ID, client)
table = Table(DEST_TABLE, dataset)
dataset_ref = DatasetReference(self.PROJECT, DS_ID)
table_ref = dataset_ref.table(DEST_TABLE)
table = Table(table_ref, client=client)
job.destination = table

job.reload()
Expand Down
Loading

0 comments on commit bb4bc6c

Please sign in to comment.