@@ -376,19 +376,6 @@ def test_columns_dtypes_invalid(self, engine):
         ]
         self.check_error_on_write(df, engine, ValueError)

-    def test_to_bytes_without_path_or_buf_provided(self, engine, df_full):
-        # GH 37105
-
-        buf = df_full.to_parquet(engine=engine)
-        assert isinstance(buf, bytes)
-
-        with tm.ensure_clean() as path:
-            with open(path, "wb") as f:
-                f.write(buf)
-            res = pd.read_parquet(path)
-
-        tm.assert_frame_equal(df_full, res)
-
     @pytest.mark.parametrize("compression", [None, "gzip", "snappy", "brotli"])
     def test_compression(self, engine, compression):
@@ -525,6 +512,19 @@ def test_basic_subset_columns(self, pa, df_full):
             read_kwargs={"columns": ["string", "int"]},
         )

+    def test_to_bytes_without_path_or_buf_provided(self, pa, df_full):
+        # GH 37105
+
+        buf = df_full.to_parquet(engine=pa)
+        assert isinstance(buf, bytes)
+
+        with tm.ensure_clean() as path:
+            with open(path, "wb") as f:
+                f.write(buf)
+            res = pd.read_parquet(path)
+
+        tm.assert_frame_equal(df_full, res)
+
     def test_duplicate_columns(self, pa):
         # not currently able to handle duplicate columns
         df = pd.DataFrame(np.arange(12).reshape(4, 3), columns=list("aaa")).copy()
0 commit comments