28 | 28 | from plotly.matplotlylib import Exporter, PlotlyRenderer
29 | 29 |
30 | 30 |
| 31 | +## JSON encoding
| 32 | +numeric_list = [1, 2, 3]
| 33 | +np_list = np.array([1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)])
| 34 | +mixed_list = [1, 'A', dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
| 35 | +              dt(2014, 1, 5, 1, 1, 1, 1)]
| 36 | +dt_list = [dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
| 37 | +           dt(2014, 1, 5, 1, 1, 1, 1)]
| 38 | +
| 39 | +df = pd.DataFrame(columns=['col 1'],
| 40 | +                  data=[1, 2, 3, dt(2014, 1, 5), pd.NaT, np.NaN, np.Inf])
| 41 | +
| 42 | +rng = pd.date_range('1/1/2011', periods=2, freq='H')
| 43 | +ts = pd.Series([1.5, 2.5], index=rng)
| 44 | +
| 45 | +
31 | 46 | class TestJSONEncoder(TestCase):
32 | 47 |
33 | 48 |     def test_encode_as_plotly(self):
@@ -161,104 +176,100 @@ def test_encode_as_decimal(self):
161 | 176 |         self.assertAlmostEqual(res, 1.023452)  # Checks upto 7 decimal places
162 | 177 |         self.assertIsInstance(res, float)
163 | 178 |
164 | | -## JSON encoding
165 | | -numeric_list = [1, 2, 3]
166 | | -np_list = np.array([1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)])
167 | | -mixed_list = [1, 'A', dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
168 | | -              dt(2014, 1, 5, 1, 1, 1, 1)]
169 | | -dt_list = [dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
170 | | -           dt(2014, 1, 5, 1, 1, 1, 1)]
171 | 179 |
172 | | -df = pd.DataFrame(columns=['col 1'],
173 | | -                  data=[1, 2, 3, dt(2014, 1, 5), pd.NaT, np.NaN, np.Inf])
174 | | -
175 | | -rng = pd.date_range('1/1/2011', periods=2, freq='H')
176 | | -ts = pd.Series([1.5, 2.5], index=rng)
177 | | -
178 | | -
179 | | -def test_figure_json_encoding():
180 | | -    df = pd.DataFrame(columns=['col 1'], data=[1, 2, 3])
181 | | -    s1 = Scatter3d(x=numeric_list, y=np_list, z=mixed_list)
182 | | -    s2 = Scatter(x=df['col 1'])
183 | | -    data = Data([s1, s2])
184 | | -    figure = Figure(data=data)
185 | | -
186 | | -    js1 = _json.dumps(s1, cls=utils.PlotlyJSONEncoder, sort_keys=True)
187 | | -    js2 = _json.dumps(s2, cls=utils.PlotlyJSONEncoder, sort_keys=True)
188 | | -
189 | | -    assert(js1 == '{"type": "scatter3d", "x": [1, 2, 3], '
190 | | -                  '"y": [1, 2, 3, null, null, null, "2014-01-05"], '
191 | | -                  '"z": [1, "A", "2014-01-05", '
192 | | -                  '"2014-01-05 01:01:01", "2014-01-05 01:01:01.000001"]}')
193 | | -    assert(js2 == '{"type": "scatter", "x": [1, 2, 3]}')
194 | | -
195 | | -    # Test JSON encoding works
196 | | -    _json.dumps(data, cls=utils.PlotlyJSONEncoder, sort_keys=True)
197 | | -    _json.dumps(figure, cls=utils.PlotlyJSONEncoder, sort_keys=True)
198 | | -
199 | | -    # Test data wasn't mutated
200 | | -    np_array = np.array(
201 | | -        [1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)]
202 | | -    )
203 | | -    for k in range(len(np_array)):
204 | | -        if k in [3, 4]:
205 | | -            # check NaN
206 | | -            assert np.isnan(np_list[k]) and np.isnan(np_array[k])
207 | | -        else:
208 | | -            # non-NaN
209 | | -            assert np_list[k] == np_array[k]
210 | | -
211 | | -    assert(set(data[0]['z']) ==
212 | | -           set([1, 'A', dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
213 | | -                dt(2014, 1, 5, 1, 1, 1, 1)]))
| 180 | +    def test_figure_json_encoding(self):
| 181 | +        df = pd.DataFrame(columns=['col 1'], data=[1, 2, 3])
| 182 | +        s1 = Scatter3d(x=numeric_list, y=np_list, z=mixed_list)
| 183 | +        s2 = Scatter(x=df['col 1'])
| 184 | +        data = Data([s1, s2])
| 185 | +        figure = Figure(data=data)
214 | 186 |
| 187 | +        js1 = _json.dumps(s1, cls=utils.PlotlyJSONEncoder, sort_keys=True)
| 188 | +        js2 = _json.dumps(s2, cls=utils.PlotlyJSONEncoder, sort_keys=True)
215 | 189 |
216 | | -def test_datetime_json_encoding():
217 | | -    j1 = _json.dumps(dt_list, cls=utils.PlotlyJSONEncoder)
218 | | -    assert(j1 == '["2014-01-05", '
219 | | -                 '"2014-01-05 01:01:01", '
220 | | -                 '"2014-01-05 01:01:01.000001"]')
221 | | -    j2 = _json.dumps({"x": dt_list}, cls=utils.PlotlyJSONEncoder)
222 | | -    assert(j2 == '{"x": ["2014-01-05", '
223 | | -                 '"2014-01-05 01:01:01", '
224 | | -                 '"2014-01-05 01:01:01.000001"]}')
| 190 | +        assert(js1 == '{"type": "scatter3d", "x": [1, 2, 3], '
| 191 | +                      '"y": [1, 2, 3, null, null, null, "2014-01-05T00:00:00"], '
| 192 | +                      '"z": [1, "A", "2014-01-05T00:00:00", '
| 193 | +                      '"2014-01-05T01:01:01", "2014-01-05T01:01:01.000001"]}')
| 194 | +        assert(js2 == '{"type": "scatter", "x": [1, 2, 3]}')
225 | 195 |
| 196 | +        # Test JSON encoding works
| 197 | +        _json.dumps(data, cls=utils.PlotlyJSONEncoder, sort_keys=True)
| 198 | +        _json.dumps(figure, cls=utils.PlotlyJSONEncoder, sort_keys=True)
226 | 199 |
227 | | -def test_pandas_json_encoding():
228 | | -    j1 = _json.dumps(df['col 1'], cls=utils.PlotlyJSONEncoder)
229 | | -    print(j1)
230 | | -    print('\n')
231 | | -    assert(j1 == '[1, 2, 3, "2014-01-05", null, null, null]')
232 | | -
233 | | -    # Test that data wasn't mutated
234 | | -    assert_series_equal(df['col 1'],
235 | | -                        pd.Series([1, 2, 3, dt(2014, 1, 5),
236 | | -                                   pd.NaT, np.NaN, np.Inf], name='col 1'))
237 | | -
238 | | -    j2 = _json.dumps(df.index, cls=utils.PlotlyJSONEncoder)
239 | | -    assert(j2 == '[0, 1, 2, 3, 4, 5, 6]')
240 | | -
241 | | -    nat = [pd.NaT]
242 | | -    j3 = _json.dumps(nat, cls=utils.PlotlyJSONEncoder)
243 | | -    assert(j3 == '[null]')
244 | | -    assert(nat[0] is pd.NaT)
245 | | -
246 | | -    j4 = _json.dumps(rng, cls=utils.PlotlyJSONEncoder)
247 | | -    assert(j4 == '["2011-01-01", "2011-01-01 01:00:00"]')
248 | | -
249 | | -    j5 = _json.dumps(ts, cls=utils.PlotlyJSONEncoder)
250 | | -    assert(j5 == '[1.5, 2.5]')
251 | | -    assert_series_equal(ts, pd.Series([1.5, 2.5], index=rng))
252 | | -
253 | | -    j6 = _json.dumps(ts.index, cls=utils.PlotlyJSONEncoder)
254 | | -    assert(j6 == '["2011-01-01", "2011-01-01 01:00:00"]')
255 | | -
256 | | -
257 | | -def test_numpy_masked_json_encoding():
258 | | -    l = [1, 2, np.ma.core.masked]
259 | | -    j1 = _json.dumps(l, cls=utils.PlotlyJSONEncoder)
260 | | -    print(j1)
261 | | -    assert(j1 == '[1, 2, null]')
| 200 | +        # Test data wasn't mutated
| 201 | +        np_array = np.array(
| 202 | +            [1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)]
| 203 | +        )
| 204 | +        for k in range(len(np_array)):
| 205 | +            if k in [3, 4]:
| 206 | +                # check NaN
| 207 | +                assert np.isnan(np_list[k]) and np.isnan(np_array[k])
| 208 | +            else:
| 209 | +                # non-NaN
| 210 | +                assert np_list[k] == np_array[k]
| 211 | +
| 212 | +        assert(set(data[0]['z']) ==
| 213 | +               set([1, 'A', dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1),
| 214 | +                    dt(2014, 1, 5, 1, 1, 1, 1)]))
| 215 | +
| 216 | +    def test_datetime_json_encoding(self):
| 217 | +        j1 = _json.dumps(dt_list, cls=utils.PlotlyJSONEncoder)
| 218 | +        assert(j1 == '["2014-01-05T00:00:00", '
| 219 | +                     '"2014-01-05T01:01:01", '
| 220 | +                     '"2014-01-05T01:01:01.000001"]')
| 221 | +        j2 = _json.dumps({"x": dt_list}, cls=utils.PlotlyJSONEncoder)
| 222 | +        assert(j2 == '{"x": ["2014-01-05T00:00:00", '
| 223 | +                     '"2014-01-05T01:01:01", '
| 224 | +                     '"2014-01-05T01:01:01.000001"]}')
| 225 | +
| 226 | +    def test_pandas_json_encoding(self):
| 227 | +        j1 = _json.dumps(df['col 1'], cls=utils.PlotlyJSONEncoder)
| 228 | +        print(j1)
| 229 | +        print('\n')
| 230 | +        assert(j1 == '[1, 2, 3, "2014-01-05T00:00:00", null, null, null]')
| 231 | +
| 232 | +        # Test that data wasn't mutated
| 233 | +        assert_series_equal(df['col 1'],
| 234 | +                            pd.Series([1, 2, 3, dt(2014, 1, 5),
| 235 | +                                       pd.NaT, np.NaN, np.Inf], name='col 1'))
| 236 | +
| 237 | +        j2 = _json.dumps(df.index, cls=utils.PlotlyJSONEncoder)
| 238 | +        assert(j2 == '[0, 1, 2, 3, 4, 5, 6]')
| 239 | +
| 240 | +        nat = [pd.NaT]
| 241 | +        j3 = _json.dumps(nat, cls=utils.PlotlyJSONEncoder)
| 242 | +        assert(j3 == '[null]')
| 243 | +        assert(nat[0] is pd.NaT)
| 244 | +
| 245 | +        j4 = _json.dumps(rng, cls=utils.PlotlyJSONEncoder)
| 246 | +        assert(j4 == '["2011-01-01T00:00:00", "2011-01-01T01:00:00"]')
| 247 | +
| 248 | +        j5 = _json.dumps(ts, cls=utils.PlotlyJSONEncoder)
| 249 | +        assert(j5 == '[1.5, 2.5]')
| 250 | +        assert_series_equal(ts, pd.Series([1.5, 2.5], index=rng))
| 251 | +
| 252 | +        j6 = _json.dumps(ts.index, cls=utils.PlotlyJSONEncoder)
| 253 | +        assert(j6 == '["2011-01-01T00:00:00", "2011-01-01T01:00:00"]')
| 254 | +
| 255 | +    def test_numpy_masked_json_encoding(self):
| 256 | +        l = [1, 2, np.ma.core.masked]
| 257 | +        j1 = _json.dumps(l, cls=utils.PlotlyJSONEncoder)
| 258 | +        print(j1)
| 259 | +        assert(j1 == '[1, 2, null]')
| 260 | +
| 261 | +    def test_numpy_dates(self):
| 262 | +        a = np.arange(np.datetime64('2011-07-11'), np.datetime64('2011-07-18'))
| 263 | +        j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
| 264 | +        assert(j1 == '["2011-07-11", "2011-07-12", "2011-07-13", '
| 265 | +                     '"2011-07-14", "2011-07-15", "2011-07-16", '
| 266 | +                     '"2011-07-17"]')
| 267 | +
| 268 | +
| 269 | +    def test_datetime_dot_date(self):
| 270 | +        a = [datetime.date(2014, 1, 1), datetime.date(2014, 1, 2)]
| 271 | +        j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
| 272 | +        assert(j1 == '["2014-01-01", "2014-01-02"]')
262 | 273 |
263 | 274 |
264 | 275 | if matplotlylib:
@@ -290,15 +301,3 @@ def test_masked_constants_example():
290 | 301 |         assert(array == [-398.11793027, -398.11792966, -398.11786308, None])
291 | 302 |
292 | 303 |
293 | | -def test_numpy_dates():
294 | | -    a = np.arange(np.datetime64('2011-07-11'), np.datetime64('2011-07-18'))
295 | | -    j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
296 | | -    assert(j1 == '["2011-07-11", "2011-07-12", "2011-07-13", '
297 | | -                 '"2011-07-14", "2011-07-15", "2011-07-16", '
298 | | -                 '"2011-07-17"]')
299 | | -
300 | | -
301 | | -def test_datetime_dot_date():
302 | | -    a = [datetime.date(2014, 1, 1), datetime.date(2014, 1, 2)]
303 | | -    j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
304 | | -    assert(j1 == '["2014-01-01", "2014-01-02"]')
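Note on the expected strings asserted above: datetime and date values are serialized to ISO 8601 text (hence the "T" separator in "2014-01-05T01:01:01"), while pd.NaT and the numpy masked constant come out as JSON null. The sketch below illustrates only that slice of the behaviour; it is a minimal standalone example, not plotly's actual PlotlyJSONEncoder (which also maps NaN and Inf to null, among other things), and the class name IsoDatetimeEncoder is made up for this illustration.

import datetime
import json

import numpy as np
import pandas as pd


class IsoDatetimeEncoder(json.JSONEncoder):
    """Hypothetical sketch: ISO 8601 for dates, null for NaT/masked values."""

    def default(self, obj):
        # pandas NaT and the numpy masked constant become JSON null.
        if obj is pd.NaT or obj is np.ma.masked:
            return None
        # datetime/date (and pd.Timestamp, a datetime subclass) -> ISO 8601 text.
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        # numpy arrays and pandas indexes -> plain lists, encoded recursively.
        if isinstance(obj, (np.ndarray, pd.Index)):
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)


print(json.dumps({"x": [datetime.datetime(2014, 1, 5, 1, 1, 1), pd.NaT]},
                 cls=IsoDatetimeEncoder))
# -> {"x": ["2014-01-05T01:01:01", null]}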