
Commit 289ad35

Author: tmnhat2001 (committed)
Fix PEP8 issue
1 parent e3cdb6e, commit 289ad35

File tree: 1 file changed (+58, -58 lines)

pandas/tests/test_categorical.py

Lines changed: 58 additions & 58 deletions
@@ -798,69 +798,69 @@ def test_set_categories_inplace(self):

The hunk touches only the @pytest.mark.parametrize block that feeds
test_drop_duplicates_non_bool: the 58 removed and 58 added lines carry the
same content, so the change appears to be a re-indentation of the
continuation lines. Rendered with a standard PEP8 hanging indent, the block
after the fix reads:

        tm.assert_index_equal(cat.categories, pd.Index(['a', 'b', 'c', 'd']))

    @pytest.mark.parametrize(
        "input1, input2, cat_array",
        [
            (
                np.array([1, 2, 3, 3], dtype=np.dtype('int_')),
                np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype('int_')),
                np.array([1, 2, 3, 4, 5], dtype=np.dtype('int_'))
            ),
            (
                np.array([1, 2, 3, 3], dtype=np.dtype('uint')),
                np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype('uint')),
                np.array([1, 2, 3, 4, 5], dtype=np.dtype('uint'))
            ),
            (
                np.array([1, 2, 3, 3], dtype=np.dtype('float_')),
                np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype('float_')),
                np.array([1, 2, 3, 4, 5], dtype=np.dtype('float_'))
            ),
            (
                np.array(
                    [1, 2, 3, 3], dtype=np.dtype('unicode_')
                ),
                np.array(
                    [1, 2, 3, 5, 3, 2, 4], dtype=np.dtype('unicode_')
                ),
                np.array(
                    [1, 2, 3, 4, 5], dtype=np.dtype('unicode_')
                )
            ),
            (
                np.array(
                    [
                        '2017-01-01 10:00:00', '2017-02-01 10:00:00',
                        '2017-03-01 10:00:00', '2017-03-01 10:00:00'
                    ],
                    dtype='datetime64'
                ),
                np.array(
                    [
                        '2017-01-01 10:00:00', '2017-02-01 10:00:00',
                        '2017-03-01 10:00:00', '2017-05-01 10:00:00',
                        '2017-03-01 10:00:00', '2017-02-01 10:00:00',
                        '2017-04-01 10:00:00'
                    ],
                    dtype='datetime64'
                ),
                np.array(
                    [
                        '2017-01-01 10:00:00', '2017-02-01 10:00:00',
                        '2017-03-01 10:00:00', '2017-04-01 10:00:00',
                        '2017-05-01 10:00:00'
                    ],
                    dtype='datetime64'
                )
            ),
            (
                pd.to_timedelta(['1 days', '2 days', '3 days', '3 days'],
                                unit="D"),
                pd.to_timedelta(['1 days', '2 days', '3 days', '5 days',
                                 '3 days', '2 days', '4 days'], unit="D"),
                pd.timedelta_range("1 days", periods=5, freq="D")
            )
        ]
    )
    @pytest.mark.parametrize("is_ordered", [True, False])
    def test_drop_duplicates_non_bool(self, input1, input2,
                                      cat_array, is_ordered):
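
Stacking a second @pytest.mark.parametrize decorator, as the hunk does with
"is_ordered", makes pytest run the test once per combination of the six dtype
cases and the two ordering values (12 invocations). The test body itself lies
outside this hunk, so the sketch below is illustrative only: it reproduces the
integer case and assumes the fixtures are used to build a Categorical and
exercise drop_duplicates. The function name test_drop_duplicates_sketch and
its assertion are hypothetical, not the pandas code.

import numpy as np
import pandas as pd
import pytest


@pytest.mark.parametrize(
    "input1, input2, cat_array",
    [
        (
            np.array([1, 2, 3, 3], dtype=np.dtype('int_')),
            np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype('int_')),
            np.array([1, 2, 3, 4, 5], dtype=np.dtype('int_'))
        )
    ]
)
@pytest.mark.parametrize("is_ordered", [True, False])
def test_drop_duplicates_sketch(input1, input2, cat_array, is_ordered):
    # Hypothetical body: the real test_drop_duplicates_non_bool is not shown
    # in this hunk. Each fixture array becomes a Categorical over cat_array.
    for arr in (input1, input2):
        cat = pd.Categorical(arr, categories=cat_array, ordered=is_ordered)
        result = pd.Series(cat).drop_duplicates()
        # drop_duplicates keeps the first occurrence of each value, so the
        # result has one entry per distinct element of arr.
        assert len(result) == len(set(arr.tolist()))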

0 commit comments
