Show More
@@ -1,131 +1,143 b'' | |||
|
1 | 1 | """Test suite for our JSON utilities. |
|
2 | 2 | """ |
|
3 | 3 | #----------------------------------------------------------------------------- |
|
4 | 4 | # Copyright (C) 2010-2011 The IPython Development Team |
|
5 | 5 | # |
|
6 | 6 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | 7 | # the file COPYING.txt, distributed as part of this software. |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | |
|
10 | 10 | #----------------------------------------------------------------------------- |
|
11 | 11 | # Imports |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | # stdlib |
|
14 | 14 | import datetime |
|
15 | 15 | import json |
|
16 | 16 | from base64 import decodestring |
|
17 | 17 | |
|
18 | 18 | # third party |
|
19 | 19 | import nose.tools as nt |
|
20 | 20 | |
|
21 | 21 | # our own |
|
22 | 22 | from IPython.utils import jsonutil, tz |
|
23 | 23 | from ..jsonutil import json_clean, encode_images |
|
24 | 24 | from ..py3compat import unicode_to_str, str_to_bytes, iteritems |
|
25 | 25 | |
|
26 | 26 | #----------------------------------------------------------------------------- |
|
27 | 27 | # Test functions |
|
28 | 28 | #----------------------------------------------------------------------------- |
|
29 | 29 | |
|
def test():
    """Round-trip a catalog of values through json_clean and json.dumps.

    Each case is (input, expected); an expected of None means json_clean
    should return the input unchanged.
    """
    cases = [
        (1, None),                    # scalars pass through untouched
        (1.0, None),
        ('a', None),
        (True, None),
        (False, None),
        (None, None),
        # complex numbers for now just go to strings, as otherwise they
        # are unserializable
        (1j, '1j'),
        # Containers
        ([1, 2], None),
        ((1, 2), [1, 2]),             # tuples come back as lists
        (set([1, 2]), [1, 2]),        # sets come back as lists
        (dict(x=1), None),
        ({'x': 1, 'y':[1,2,3], '1':'int'}, None),
        # More exotic objects: iterables are materialized as lists
        ((x for x in range(3)), [0, 1, 2]),
        (iter([1, 2]), [1, 2]),
    ]

    for value, expected in cases:
        cleaned = json_clean(value)
        # validate our cleanup
        nt.assert_equal(cleaned, value if expected is None else expected)
        # and ensure that what we return, indeed encodes cleanly
        json.loads(json.dumps(cleaned))
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | |
|
def test_encode_images():
    """encode_images must base64-encode raw image bytes and be idempotent.

    Feeding already-encoded data (unicode or native str) back through
    encode_images must not double-encode it.
    """
    # base64.decodestring was deprecated in Python 3.1 and removed in 3.9;
    # decodebytes is the drop-in replacement.
    from base64 import decodebytes

    # invalid data, but the header and footer are from real files
    pngdata = b'\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82'
    jpegdata = b'\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9'

    fmt = {
        'image/png' : pngdata,
        'image/jpeg' : jpegdata,
    }
    encoded = encode_images(fmt)
    for key, value in iteritems(fmt):
        # encoded has unicode, want bytes
        decoded = decodebytes(encoded[key].encode('ascii'))
        nt.assert_equal(decoded, value)
    # encoding already-encoded data must be a no-op
    encoded2 = encode_images(encoded)
    nt.assert_equal(encoded, encoded2)

    # the same must hold when the b64 payloads arrive as native str
    # (note: use a distinct loop variable so `encoded` is not clobbered
    # while it is being iterated over)
    b64_str = {}
    for key, b64_value in iteritems(encoded):
        b64_str[key] = unicode_to_str(b64_value)
    encoded3 = encode_images(b64_str)
    nt.assert_equal(encoded3, b64_str)
    for key, value in iteritems(fmt):
        # encoded3 has str, want bytes
        decoded = decodebytes(str_to_bytes(encoded3[key]))
        nt.assert_equal(decoded, value)
|
90 | 90 | |
|
def test_lambda():
    """A lambda is cleaned to a JSON-safe descriptive string."""
    cleaned = json_clean(lambda : 1)
    assert isinstance(cleaned, str)
    assert '<lambda>' in cleaned
    # the replacement string must itself be serializable
    json.dumps(cleaned)
|
96 | 96 | |
|
def test_extract_dates():
    """Every supported ISO8601 timezone spelling must parse to datetimes
    representing the same instant."""
    timestamps = [
        '2013-07-03T16:34:52.249482',
        '2013-07-03T16:34:52.249482Z',
        '2013-07-03T16:34:52.249482Z-0800',
        '2013-07-03T16:34:52.249482Z+0800',
        '2013-07-03T16:34:52.249482Z+08:00',
        '2013-07-03T16:34:52.249482Z-08:00',
        '2013-07-03T16:34:52.249482-0800',
        '2013-07-03T16:34:52.249482+0800',
        '2013-07-03T16:34:52.249482+08:00',
        '2013-07-03T16:34:52.249482-08:00',
    ]
    parsed = jsonutil.extract_dates(timestamps)
    # compare everything against the first parsed value
    reference = parsed[0]
    for stamp in parsed:
        nt.assert_true(isinstance(stamp, datetime.datetime))
        nt.assert_equal(stamp, reference)
|
115 | 115 | |
|
def test_parse_ms_precision():
    """parse_date accepts 1-6 fractional-second digits; any other count
    leaves the value as a plain string."""
    base = '2013-07-03T16:34:52.'
    digits = '1234567890'

    for ndigits in range(len(digits)):
        candidate = base + digits[:ndigits]
        parsed = jsonutil.parse_date(candidate)
        if 1 <= ndigits <= 6:
            # valid microsecond precision -> parsed to a datetime
            assert isinstance(parsed, datetime.datetime)
        else:
            # zero or too many digits -> passed through unparsed
            assert isinstance(parsed, str)
|
127 | ||
|
def test_date_default():
    """date_default must serialize datetimes, and exactly one value in the
    output should carry a "+00" UTC offset (presumably the tz-aware
    utcnow, not the naive now() — confirm against tz.utcnow)."""
    data = dict(today=datetime.datetime.now(), utcnow=tz.utcnow())
    jsondata = json.dumps(data, default=jsonutil.date_default)
    nt.assert_in("+00", jsondata)
    nt.assert_equal(jsondata.count("+00"), 1)
    # round trip: the serialized ISO strings must come back as datetimes
    roundtripped = jsonutil.extract_dates(json.loads(jsondata))
    for value in roundtripped.values():
        nt.assert_true(isinstance(value, datetime.datetime))
|
124 | 136 | |
|
def test_exception():
    """Dicts whose keys collide after stringification must be rejected."""
    # in each dict, both keys stringify to the same JSON object key
    colliding_dicts = [
        {1:'number', '1':'string'},
        {True:'bool', 'True':'string'},
    ]
    for bad in colliding_dicts:
        nt.assert_raises(ValueError, json_clean, bad)
|
131 | 143 |
General Comments 0
You need to be logged in to leave comments.
Login now