Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 24 additions & 16 deletions bigquery/google/cloud/bigquery/dbapi/cursor.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,7 @@ def __init__(self, connection):
# a single row at a time.
self.arraysize = 1
self._query_data = None
self._page_token = None
self._has_fetched_all_rows = True
self._query_results = None

def close(self):
"""No-op."""
Expand Down Expand Up @@ -133,9 +132,8 @@ def execute(self, operation, parameters=None, job_id=None):
:param job_id: (Optional) The job_id to use. If not set, a job ID
is generated at random.
"""
self._query_data = None
self._query_results = None
self._page_token = None
self._has_fetched_all_rows = False
client = self.connection._client
if job_id is None:
job_id = str(uuid.uuid4())
Expand All @@ -161,8 +159,7 @@ def execute(self, operation, parameters=None, job_id=None):
raise exceptions.DatabaseError(query_job.errors)

query_results = query_job.query_results()
self._query_data = iter(
query_results.fetch_data(max_results=self.arraysize))
self._query_results = query_results
self._set_rowcount(query_results)
self._set_description(query_results.schema)

Expand All @@ -178,6 +175,22 @@ def executemany(self, operation, seq_of_parameters):
for parameters in seq_of_parameters:
self.execute(operation, parameters)

def _try_fetch(self, size=None):
"""Try to start fetching data, if not yet started.

Mutates self to indicate that iteration has started.
"""
if self._query_results is None:
raise exceptions.InterfaceError(
'No query results: execute() must be called before fetch.')

if size is None:
size = self.arraysize

if self._query_data is None:
self._query_data = iter(
self._query_results.fetch_data(max_results=size))

def fetchone(self):
"""Fetch a single row from the results of the last ``execute*()`` call.

Expand All @@ -188,10 +201,7 @@ def fetchone(self):
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
"""
if self._query_data is None:
raise exceptions.InterfaceError(
'No query results: execute() must be called before fetch.')

self._try_fetch()
try:
return six.next(self._query_data)
except StopIteration:
Expand All @@ -215,17 +225,17 @@ def fetchmany(self, size=None):
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
"""
if self._query_data is None:
raise exceptions.InterfaceError(
'No query results: execute() must be called before fetch.')
if size is None:
size = self.arraysize

self._try_fetch(size=size)
rows = []

for row in self._query_data:
rows.append(row)
if len(rows) >= size:
break

return rows

def fetchall(self):
Expand All @@ -236,9 +246,7 @@ def fetchall(self):
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
"""
if self._query_data is None:
raise exceptions.InterfaceError(
'No query results: execute() must be called before fetch.')
self._try_fetch()
return [row for row in self._query_data]

def setinputsizes(self, sizes):
Expand Down
7 changes: 4 additions & 3 deletions bigquery/google/cloud/bigquery/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -440,6 +440,9 @@ def fetch_data(self, max_results=None, page_token=None, start_index=None,
if timeout_ms is not None:
params['timeoutMs'] = timeout_ms

if max_results is not None:
params['maxResults'] = max_results

path = '/projects/%s/queries/%s' % (self.project, self.name)
iterator = page_iterator.HTTPIterator(
client=client,
Expand All @@ -448,12 +451,10 @@ def fetch_data(self, max_results=None, page_token=None, start_index=None,
item_to_value=_item_to_row,
items_key='rows',
page_token=page_token,
max_results=max_results,
page_start=_rows_page_start_query,
next_token='pageToken',
extra_params=params)
iterator.query_result = self
# Over-ride the key used to retrieve the next page token.
iterator._NEXT_TOKEN = 'pageToken'
return iterator


Expand Down
12 changes: 8 additions & 4 deletions bigquery/google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -722,6 +722,11 @@ def fetch_data(self, max_results=None, page_token=None, client=None):
if len(self._schema) == 0:
raise ValueError(_TABLE_HAS_NO_SCHEMA)

params = {}

if max_results is not None:
params['maxResults'] = max_results

client = self._require_client(client)
path = '%s/data' % (self.path,)
iterator = page_iterator.HTTPIterator(
Expand All @@ -731,11 +736,10 @@ def fetch_data(self, max_results=None, page_token=None, client=None):
item_to_value=_item_to_row,
items_key='rows',
page_token=page_token,
max_results=max_results,
page_start=_rows_page_start)
page_start=_rows_page_start,
next_token='pageToken',
extra_params=params)
iterator.schema = self._schema
# Over-ride the key used to retrieve the next page token.
iterator._NEXT_TOKEN = 'pageToken'
return iterator

def row_from_mapping(self, mapping):
Expand Down
12 changes: 11 additions & 1 deletion bigquery/tests/system.py
Original file line number Diff line number Diff line change
Expand Up @@ -747,6 +747,16 @@ def test_dbapi_w_standard_sql_types(self):
row = Config.CURSOR.fetchone()
self.assertIsNone(row)

def test_dbapi_fetchall(self):
    """fetchall() returns every row regardless of the cursor arraysize."""
    query = 'SELECT * FROM UNNEST([(1, 2), (3, 4), (5, 6)])'
    expected = [(1, 2), (3, 4), (5, 6)]

    # Exercise several page sizes to confirm paging does not drop rows.
    for batch_size in (1, 2, 3, 4):
        Config.CURSOR.execute(query)
        self.assertEqual(Config.CURSOR.rowcount, 3, "expected 3 rows")
        Config.CURSOR.arraysize = batch_size
        self.assertEqual(Config.CURSOR.fetchall(), expected)

def _load_table_for_dml(self, rows, dataset_name, table_name):
from google.cloud._testing import _NamedTemporaryFile

Expand Down Expand Up @@ -1084,7 +1094,7 @@ def test_large_query_w_public_data(self):
query.use_legacy_sql = False
query.run()

iterator = query.fetch_data()
iterator = query.fetch_data(max_results=100)
rows = list(iterator)
self.assertEqual(len(rows), LIMIT)

Expand Down
2 changes: 1 addition & 1 deletion bigquery/tests/unit/test_dbapi_cursor.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,8 +141,8 @@ def test_fetchmany_w_arraysize(self):
(7, 8, 9),
]))
cursor = connection.cursor()
cursor.arraysize = 2
cursor.execute('SELECT a, b, c;')
cursor.arraysize = 2
rows = cursor.fetchmany()
self.assertEqual(len(rows), 2)
self.assertEqual(rows[0], (1, 2, 3))
Expand Down
9 changes: 6 additions & 3 deletions core/google/api/core/page_iterator.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,8 @@ class HTTPIterator(Iterator):
signature takes the :class:`Iterator` that started the page,
the :class:`Page` that was started and the dictionary containing
the page response.
next_token (str): The name of the field used in the response for page
tokens.

.. autoattribute:: pages
"""
Expand All @@ -283,13 +285,13 @@ class HTTPIterator(Iterator):
_PAGE_TOKEN = 'pageToken'
_MAX_RESULTS = 'maxResults'
_NEXT_TOKEN = 'nextPageToken'
_RESERVED_PARAMS = frozenset([_PAGE_TOKEN, _MAX_RESULTS])
_RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
_HTTP_METHOD = 'GET'

def __init__(self, client, api_request, path, item_to_value,
items_key=_DEFAULT_ITEMS_KEY,
page_token=None, max_results=None, extra_params=None,
page_start=_do_nothing_page_start):
page_start=_do_nothing_page_start, next_token=_NEXT_TOKEN):
super(HTTPIterator, self).__init__(
client, item_to_value, page_token=page_token,
max_results=max_results)
Expand All @@ -298,6 +300,7 @@ def __init__(self, client, api_request, path, item_to_value,
self._items_key = items_key
self.extra_params = extra_params
self._page_start = page_start
self._next_token = next_token
# Verify inputs / provide defaults.
if self.extra_params is None:
self.extra_params = {}
Expand Down Expand Up @@ -327,7 +330,7 @@ def _next_page(self):
items = response.get(self._items_key, ())
page = Page(self, items, self._item_to_value)
self._page_start(self, page, response)
self.next_page_token = response.get(self._NEXT_TOKEN)
self.next_page_token = response.get(self._next_token)
return page
else:
return None
Expand Down