diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index 8049b748e..4ea5687e0 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -1532,8 +1532,9 @@ def result(  # type: ignore  # (incompatible with supertype)
         # Setting max_results should be equivalent to setting page_size with
         # regards to allowing the user to tune how many results to download
         # while we wait for the query to finish. See internal issue:
-        # 344008814.
-        if page_size is None and max_results is not None:
+        # 344008814. But if start_index is set, user is trying to access a
+        # specific page, so we don't need to set page_size. See issue #1950.
+        if page_size is None and max_results is not None and start_index is None:
             page_size = max_results
 
         # When timeout has default sentinel value ``object()``, do not pass
diff --git a/tests/unit/job/test_query.py b/tests/unit/job/test_query.py
index 5b69c98cf..4bbd31c73 100644
--- a/tests/unit/job/test_query.py
+++ b/tests/unit/job/test_query.py
@@ -1652,7 +1652,17 @@ def test_result_with_start_index(self):
         start_index = 1
 
-        result = job.result(start_index=start_index)
+        # Verifies that page_size isn't overwritten by max_results when
+        # start_index is not None. See
+        # https://github.com/googleapis/python-bigquery/issues/1950
+        page_size = 10
+        max_results = 100
+
+        result = job.result(
+            page_size=page_size,
+            max_results=max_results,
+            start_index=start_index,
+        )
 
         self.assertIsInstance(result, RowIterator)
         self.assertEqual(result.total_rows, 5)
@@ -1665,6 +1675,9 @@ def test_result_with_start_index(self):
         self.assertEqual(
             tabledata_list_request[1]["query_params"]["startIndex"], start_index
         )
+        self.assertEqual(
+            tabledata_list_request[1]["query_params"]["maxResults"], page_size
+        )
 
     def test_result_error(self):
         from google.cloud import exceptions