@@ -1,8 +1,10 @@
+from contextlib import ExitStack as does_not_raise
 from datetime import datetime
 import os
 import platform
 
 import numpy as np
+from pandas_gbq.gbq import TableCreationError
 import pytest
 import pytz
 
@@ -21,7 +23,7 @@
 DATASET_ID = "pydata_pandas_bq_testing_py3"
 
 TABLE_ID = "new_test"
-DESTINATION_TABLE = "{0}.{1}".format(DATASET_ID + "1", TABLE_ID)
+DESTINATION_TABLE = f"{DATASET_ID + '1'}.{TABLE_ID}"
 
 VERSION = platform.python_version()
 
@@ -149,33 +151,28 @@ def mock_read_gbq(sql, **kwargs):
 
 @pytest.mark.single
 class TestToGBQIntegrationWithServiceAccountKeyPath:
-    @classmethod
-    def setup_class(cls):
-        # - GLOBAL CLASS FIXTURES -
-        # put here any instruction you want to execute only *ONCE* *BEFORE*
-        # executing *ALL* tests described below.
-
+    @pytest.fixture()
+    def gbq_dataset(self):
+        # Setup Dataset
         _skip_if_no_project_id()
         _skip_if_no_private_key_path()
 
-        cls.client = _get_client()
-        cls.dataset = cls.client.dataset(DATASET_ID + "1")
+        self.client = _get_client()
+        self.dataset = self.client.dataset(DATASET_ID + "1")
         try:
             # Clean-up previous test runs.
-            cls.client.delete_dataset(cls.dataset, delete_contents=True)
+            self.client.delete_dataset(self.dataset, delete_contents=True)
         except api_exceptions.NotFound:
             pass  # It's OK if the dataset doesn't already exist.
 
-        cls.client.create_dataset(bigquery.Dataset(cls.dataset))
+        self.client.create_dataset(bigquery.Dataset(self.dataset))
+
+        yield
 
-    @classmethod
-    def teardown_class(cls):
-        # - GLOBAL CLASS FIXTURES -
-        # put here any instruction you want to execute only *ONCE* *AFTER*
-        # executing all tests.
-        cls.client.delete_dataset(cls.dataset, delete_contents=True)
+        # Teardown Dataset
+        self.client.delete_dataset(self.dataset, delete_contents=True)
 
-    def test_roundtrip(self):
+    def test_roundtrip(self, gbq_dataset):
         destination_table = DESTINATION_TABLE + "1"
 
         test_size = 20001
@@ -189,31 +186,38 @@ def test_roundtrip(self):
         )
 
         result = pd.read_gbq(
-            "SELECT COUNT(*) AS num_rows FROM {0}".format(destination_table),
+            f"SELECT COUNT(*) AS num_rows FROM {destination_table}",
             project_id=_get_project_id(),
             credentials=_get_credentials(),
             dialect="standard",
         )
         assert result["num_rows"][0] == test_size
 
-    @pytest.mark.xfail(reason="Test breaking master")
+    @pytest.mark.xfail(reason="Test breaking master", strict=False)
     @pytest.mark.parametrize(
-        "if_exists, expected_num_rows",
-        [("append", 300), ("fail", 200), ("replace", 100)],
+        "if_exists, expected_num_rows, expectation",
+        [
+            ("append", 300, does_not_raise()),
+            ("fail", 200, pytest.raises(TableCreationError)),
+            ("replace", 100, does_not_raise()),
+        ],
     )
-    def test_gbq_if_exists(self, if_exists, expected_num_rows):
+    def test_gbq_if_exists(
+        self, if_exists, expected_num_rows, expectation, gbq_dataset
+    ):
         # GH 29598
         destination_table = DESTINATION_TABLE + "2"
 
         test_size = 200
         df = make_mixed_dataframe_v2(test_size)
 
-        df.to_gbq(
-            destination_table,
-            _get_project_id(),
-            chunksize=None,
-            credentials=_get_credentials(),
-        )
+        with expectation:
+            df.to_gbq(
+                destination_table,
+                _get_project_id(),
+                chunksize=None,
+                credentials=_get_credentials(),
+            )
 
         df.iloc[:100].to_gbq(
             destination_table,
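For reviewers unfamiliar with the two pytest idioms this diff leans on, here is a minimal, self-contained sketch of both: a yield-based fixture replacing `setup_class`/`teardown_class`, and a parametrized `expectation` context manager where `contextlib.ExitStack` (aliased `does_not_raise`) acts as a no-op counterpart to `pytest.raises`. The `fake_resource` fixture, `FakeError`, and `create` below are illustrative stand-ins, not part of the pandas test suite.

```python
from contextlib import ExitStack as does_not_raise

import pytest


class FakeError(Exception):
    pass


def create(name, if_exists):
    # Stand-in for df.to_gbq: raises when the target exists and if_exists="fail".
    if if_exists == "fail":
        raise FakeError(name)


@pytest.fixture()
def fake_resource():
    # Everything before the yield runs as setup, once per test that requests
    # the fixture (unlike setup_class, which runs once per class).
    resource = {"name": "dataset1"}
    yield resource
    # Everything after the yield runs as teardown, even if the test failed.
    resource.clear()


@pytest.mark.parametrize(
    "if_exists, expectation",
    [
        ("append", does_not_raise()),
        ("fail", pytest.raises(FakeError)),
        ("replace", does_not_raise()),
    ],
)
def test_create(fake_resource, if_exists, expectation):
    # ExitStack() enters and exits without doing anything, so the "append"
    # and "replace" rows run the body unguarded, while the "fail" row
    # asserts that FakeError is raised.
    with expectation:
        create(fake_resource["name"], if_exists)
```

The fixture form also guarantees per-test cleanup: because teardown sits after the `yield`, each test in the class gets a fresh dataset, whereas the old class-level hooks shared one dataset across all tests.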