+from contextlib import ExitStack as does_not_raise
 from datetime import datetime
 import os
 import platform
+import random
+import string
 
 import numpy as np
 import pytest
@@ -18,11 +21,6 @@
 PRIVATE_KEY_JSON_PATH = None
 PRIVATE_KEY_JSON_CONTENTS = None
 
-DATASET_ID = "pydata_pandas_bq_testing_py3"
-
-TABLE_ID = "new_test"
-DESTINATION_TABLE = "{0}.{1}".format(DATASET_ID + "1", TABLE_ID)
-
 VERSION = platform.python_version()
 
 
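A note on the new import above: aliasing `contextlib.ExitStack` to `does_not_raise` follows the pytest documentation's idiom for parametrizing conditional raising. An `ExitStack()` that never enters any context manager is effectively a no-op `with` block, so it can stand in for "no exception expected" next to `pytest.raises(...)` in the same parametrize list (on Python 3.7+, `contextlib.nullcontext` is an equivalent spelling). A minimal sketch of just that behaviour, separate from this diff:

    from contextlib import ExitStack as does_not_raise

    # An empty ExitStack enters and exits cleanly and suppresses nothing,
    # so the body below simply runs as if there were no `with` at all.
    with does_not_raise():
        print("no exception expected here")
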
@@ -149,34 +147,33 @@ def mock_read_gbq(sql, **kwargs):
 
 @pytest.mark.single
 class TestToGBQIntegrationWithServiceAccountKeyPath:
-    @classmethod
-    def setup_class(cls):
-        # - GLOBAL CLASS FIXTURES -
-        # put here any instruction you want to execute only *ONCE* *BEFORE*
-        # executing *ALL* tests described below.
-
+    @pytest.fixture()
+    def gbq_dataset(self):
+        # Setup Dataset
         _skip_if_no_project_id()
         _skip_if_no_private_key_path()
 
-        cls.client = _get_client()
-        cls.dataset = cls.client.dataset(DATASET_ID + "1")
+        dataset_id = "pydata_pandas_bq_testing_py31"
+
+        self.client = _get_client()
+        self.dataset = self.client.dataset(dataset_id)
         try:
             # Clean-up previous test runs.
-            cls.client.delete_dataset(cls.dataset, delete_contents=True)
+            self.client.delete_dataset(self.dataset, delete_contents=True)
         except api_exceptions.NotFound:
             pass  # It's OK if the dataset doesn't already exist.
 
-        cls.client.create_dataset(bigquery.Dataset(cls.dataset))
+        self.client.create_dataset(bigquery.Dataset(self.dataset))
+
+        table_name = "".join(random.choices(string.ascii_lowercase, k=10))
+        destination_table = f"{dataset_id}.{table_name}"
+        yield destination_table
 
-    @classmethod
-    def teardown_class(cls):
-        # - GLOBAL CLASS FIXTURES -
-        # put here any instruction you want to execute only *ONCE* *AFTER*
-        # executing all tests.
-        cls.client.delete_dataset(cls.dataset, delete_contents=True)
+        # Teardown Dataset
+        self.client.delete_dataset(self.dataset, delete_contents=True)
 
-    def test_roundtrip(self):
-        destination_table = DESTINATION_TABLE + "1"
+    def test_roundtrip(self, gbq_dataset):
+        destination_table = gbq_dataset
 
         test_size = 20001
         df = make_mixed_dataframe_v2(test_size)
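For readers less familiar with pytest fixtures: everything in `gbq_dataset` before the `yield` runs as per-test setup, the yielded destination-table string becomes the `gbq_dataset` argument of each test that requests it, and the code after the `yield` runs as teardown once the test finishes. A standalone sketch of the same pattern (the names `temp_resource` and `test_uses_resource` are illustrative, not part of this change):

    import pytest


    @pytest.fixture()
    def temp_resource():
        # Setup: runs before each test that requests this fixture.
        resource = {"name": "scratch"}
        yield resource  # The yielded value is what the test receives.
        # Teardown: runs after the test completes, even if it failed.
        resource.clear()


    def test_uses_resource(temp_resource):
        assert temp_resource["name"] == "scratch"
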
@@ -189,21 +186,26 @@ def test_roundtrip(self):
         )
 
         result = pd.read_gbq(
-            "SELECT COUNT(*) AS num_rows FROM {0}".format(destination_table),
+            f"SELECT COUNT(*) AS num_rows FROM {destination_table}",
             project_id=_get_project_id(),
             credentials=_get_credentials(),
             dialect="standard",
         )
         assert result["num_rows"][0] == test_size
 
-    @pytest.mark.xfail(reason="Test breaking master", strict=False)
     @pytest.mark.parametrize(
-        "if_exists, expected_num_rows",
-        [("append", 300), ("fail", 200), ("replace", 100)],
+        "if_exists, expected_num_rows, expectation",
+        [
+            ("append", 300, does_not_raise()),
+            ("fail", 200, pytest.raises(pandas_gbq.gbq.TableCreationError)),
+            ("replace", 100, does_not_raise()),
+        ],
     )
-    def test_gbq_if_exists(self, if_exists, expected_num_rows):
+    def test_gbq_if_exists(
+        self, if_exists, expected_num_rows, expectation, gbq_dataset
+    ):
         # GH 29598
-        destination_table = DESTINATION_TABLE + "2"
+        destination_table = gbq_dataset
 
         test_size = 200
         df = make_mixed_dataframe_v2(test_size)
@@ -215,13 +217,14 @@ def test_gbq_if_exists(self, if_exists, expected_num_rows):
             credentials=_get_credentials(),
         )
 
-        df.iloc[:100].to_gbq(
-            destination_table,
-            _get_project_id(),
-            if_exists=if_exists,
-            chunksize=None,
-            credentials=_get_credentials(),
-        )
+        with expectation:
+            df.iloc[:100].to_gbq(
+                destination_table,
+                _get_project_id(),
+                if_exists=if_exists,
+                chunksize=None,
+                credentials=_get_credentials(),
+            )
 
         result = pd.read_gbq(
             f"SELECT COUNT(*) AS num_rows FROM {destination_table}",
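Taken together, the parametrization and the `with expectation:` block let one test body cover both the passing and the failing `if_exists` modes: `pytest.raises(...)` requires the block to raise the named error, while `does_not_raise()` requires it to complete normally. A self-contained analogue of the pattern, using a made-up `divide` helper rather than anything in this test suite:

    from contextlib import ExitStack as does_not_raise

    import pytest


    def divide(a, b):
        return a / b


    @pytest.mark.parametrize(
        "denominator, expectation",
        [
            (2, does_not_raise()),                  # happy path: must not raise
            (0, pytest.raises(ZeroDivisionError)),  # error path: must raise
        ],
    )
    def test_divide(denominator, expectation):
        with expectation:
            divide(6, denominator)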