@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-import sys
 import uuid
 from datetime import datetime
 from decimal import Decimal
@@ ... @@
 
 import re
 
+from elasticsearch.serializer import JSONSerializer, OrjsonSerializer, TextSerializer
+
 from elasticsearch_serverless import Elasticsearch
 from elasticsearch_serverless.exceptions import SerializationError
-from elasticsearch_serverless.serializer import JSONSerializer, TextSerializer
 
 requires_numpy_and_pandas = pytest.mark.skipif(
-    np is None or pd is None, reason="Test requires numpy or pandas to be available"
+    np is None or pd is None, reason="Test requires numpy and pandas to be available"
 )
 
 
-def test_datetime_serialization():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
+# Parametrized fixture: every test that accepts it runs once per JSON serializer
+@pytest.fixture(params=[JSONSerializer, OrjsonSerializer])
+def json_serializer(request: pytest.FixtureRequest):
+    yield request.param()
+
+
+def test_datetime_serialization(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
         {"d": datetime(2010, 10, 1, 2, 30)}
     )
 
 
-def test_decimal_serialization():
-    requires_numpy_and_pandas()
+def test_decimal_serialization(json_serializer):
+    assert b'{"d":3.8}' == json_serializer.dumps({"d": Decimal("3.8")})
 
-    if sys.version_info[:2] == (2, 6):
-        pytest.skip("Float rounding is broken in 2.6.")
-    assert b'{"d":3.8}' == JSONSerializer().dumps({"d": Decimal("3.8")})
 
-
-def test_uuid_serialization():
-    assert b'{"d":"00000000-0000-0000-0000-000000000003"}' == JSONSerializer().dumps(
+def test_uuid_serialization(json_serializer):
+    assert b'{"d":"00000000-0000-0000-0000-000000000003"}' == json_serializer.dumps(
         {"d": uuid.UUID("00000000-0000-0000-0000-000000000003")}
     )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_bool():
-    assert b'{"d":true}' == JSONSerializer().dumps({"d": np.bool_(True)})
+def test_serializes_numpy_bool(json_serializer):
+    assert b'{"d":true}' == json_serializer.dumps({"d": np.bool_(True)})
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_integers():
-    ser = JSONSerializer()
+def test_serializes_numpy_integers(json_serializer):
     for np_type in (
         np.int_,
         np.int8,
         np.int16,
         np.int32,
         np.int64,
     ):
-        assert ser.dumps({"d": np_type(-1)}) == b'{"d":-1}'
+        assert json_serializer.dumps({"d": np_type(-1)}) == b'{"d":-1}'
 
     for np_type in (
         np.uint8,
         np.uint16,
         np.uint32,
         np.uint64,
     ):
-        assert ser.dumps({"d": np_type(1)}) == b'{"d":1}'
+        assert json_serializer.dumps({"d": np_type(1)}) == b'{"d":1}'
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_floats():
-    ser = JSONSerializer()
+def test_serializes_numpy_floats(json_serializer):
     for np_type in (
         np.float32,
         np.float64,
     ):
-        assert re.search(rb'^{"d":1\.2[\d]*}$', ser.dumps({"d": np_type(1.2)}))
+        assert re.search(
+            rb'^{"d":1\.2[\d]*}$', json_serializer.dumps({"d": np_type(1.2)})
+        )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_datetime():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
+def test_serializes_numpy_datetime(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
         {"d": np.datetime64("2010-10-01T02:30:00")}
     )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_ndarray():
-    assert b'{"d":[0,0,0,0,0]}' == JSONSerializer().dumps(
+def test_serializes_numpy_ndarray(json_serializer):
+    assert b'{"d":[0,0,0,0,0]}' == json_serializer.dumps(
         {"d": np.zeros((5,), dtype=np.uint8)}
     )
     # This isn't useful for Elasticsearch, just want to make sure it works.
-    assert b'{"d":[[0,0],[0,0]]}' == JSONSerializer().dumps(
+    assert b'{"d":[[0,0],[0,0]]}' == json_serializer.dumps(
         {"d": np.zeros((2, 2), dtype=np.uint8)}
     )
 
 
 @requires_numpy_and_pandas
 def test_serializes_numpy_nan_to_nan():
-    assert b'{"d":NaN}' == JSONSerializer().dumps({"d": np.nan})
+    assert b'{"d":NaN}' == JSONSerializer().dumps({"d": float("NaN")})
+    # NaN is invalid JSON, and orjson silently converts it to null
+    assert b'{"d":null}' == OrjsonSerializer().dumps({"d": float("NaN")})
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_timestamp():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
-        {"d": pd.Timestamp("2010-10-01T02:30:00")}
-    )
+def test_serializes_pandas_timestamp(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
+        {"d": pd.Timestamp("2010-10-01T02:30:00")}
+    )
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_series():
-    assert b'{"d":["a","b","c","d"]}' == JSONSerializer().dumps(
+def test_serializes_pandas_series(json_serializer):
+    assert b'{"d":["a","b","c","d"]}' == json_serializer.dumps(
         {"d": pd.Series(["a", "b", "c", "d"])}
     )
 
 
 @requires_numpy_and_pandas
 @pytest.mark.skipif(not hasattr(pd, "NA"), reason="pandas.NA is required")
-def test_serializes_pandas_na():
-    assert b'{"d":null}' == JSONSerializer().dumps({"d": pd.NA})
+def test_serializes_pandas_na(json_serializer):
+    assert b'{"d":null}' == json_serializer.dumps({"d": pd.NA})
 
 
 @requires_numpy_and_pandas
 @pytest.mark.skipif(not hasattr(pd, "NaT"), reason="pandas.NaT required")
-def test_raises_serialization_error_pandas_nat():
+def test_raises_serialization_error_pandas_nat(json_serializer):
     with pytest.raises(SerializationError):
-        JSONSerializer().dumps({"d": pd.NaT})
+        json_serializer.dumps({"d": pd.NaT})
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_category():
+def test_serializes_pandas_category(json_serializer):
     cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"])
-    assert b'{"d":["a","c","b","a"]}' == JSONSerializer().dumps({"d": cat})
+    assert b'{"d":["a","c","b","a"]}' == json_serializer.dumps({"d": cat})
 
     cat = pd.Categorical([1, 2, 3], categories=[1, 2, 3])
-    assert b'{"d":[1,2,3]}' == JSONSerializer().dumps({"d": cat})
+    assert b'{"d":[1,2,3]}' == json_serializer.dumps({"d": cat})
 
 
-def test_json_raises_serialization_error_on_dump_error():
+def test_json_raises_serialization_error_on_dump_error(json_serializer):
     with pytest.raises(SerializationError):
-        JSONSerializer().dumps(object())
+        json_serializer.dumps(object())
 
 
 def test_raises_serialization_error_on_load_error():