@@ -81,9 +81,9 @@ def squash_unicode(obj):
 
 # ISO8601-ify datetime objects
 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default)
-json_unpacker = lambda s: extract_dates(jsonapi.loads(s))
+json_unpacker = lambda s: jsonapi.loads(s)
 
-pickle_packer = lambda o: pickle.dumps(o,-1)
+pickle_packer = lambda o: pickle.dumps(squash_dates(o),-1)
 pickle_unpacker = pickle.loads
 
 default_packer = json_packer
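
Note on this hunk: date parsing is removed from json_unpacker, and squash_dates is applied in pickle_packer, so both packers now put ISO8601 strings on the wire and hand back strings rather than datetime objects. A minimal sketch of the new behaviour using stand-ins (plain json in place of jsonapi, and an isoformat-based date_default; both are assumptions, not the actual IPython implementations):

import json
from datetime import datetime

# stand-ins for the lambdas in the hunk above
date_default = lambda obj: obj.isoformat() if isinstance(obj, datetime) else obj
json_packer = lambda obj: json.dumps(obj, default=date_default)
json_unpacker = lambda s: json.loads(s)

wire = json_packer({'t': datetime(2013, 7, 3, 16, 34, 52)})
print(json_unpacker(wire)['t'])   # '2013-07-03T16:34:52' -- a string, not a datetime
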
@@ -429,7 +429,7 @@ class Session(Configurable):
         return str(uuid.uuid4())
 
     def _check_packers(self):
-        """check packers for binary data and datetime support."""
+        """check packers for datetime support."""
         pack = self.pack
         unpack = self.unpack
 
@@ -469,9 +469,11 @@ class Session(Configurable):
         msg = dict(t=datetime.now())
         try:
             unpacked = unpack(pack(msg))
+            if isinstance(unpacked['t'], datetime):
+                raise ValueError("Shouldn't deserialize to datetime")
         except Exception:
             self.pack = lambda o: pack(squash_dates(o))
-            self.unpack = lambda s: extract_dates(unpack(s))
+            self.unpack = lambda s: unpack(s)
 
     def msg_header(self, msg_type):
         return msg_header(self.msg_id, msg_type, self.username, self.session)
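
As this hunk reads, a packer that round-trips datetime objects natively (pickle, msgpack) is now forced through the same squash_dates path as JSON, so Session.unpack never hands back datetimes and conversion is deferred to extract_dates at the edges. A rough sketch of that fallback, with an assumed squash_dates that recursively replaces datetimes with ISO8601 strings (the real helper lives in IPython.utils.jsonutil):

import pickle
from datetime import datetime

def squash_dates(obj):
    # assumed behaviour: recursively replace datetime objects with ISO8601 strings
    if isinstance(obj, dict):
        return {k: squash_dates(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [squash_dates(o) for o in obj]
    if isinstance(obj, datetime):
        return obj.isoformat()
    return obj

raw_pack = lambda o: pickle.dumps(o, -1)
raw_unpack = pickle.loads

msg = dict(t=datetime.now())
if isinstance(raw_unpack(raw_pack(msg))['t'], datetime):
    # same wrapping the except-branch applies once the ValueError is raised
    pack = lambda o: raw_pack(squash_dates(o))
else:
    pack = raw_pack

assert isinstance(raw_unpack(pack(msg))['t'], str)
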
@@ -815,10 +817,10 @@ class Session(Configurable):
         if not len(msg_list) >= minlen:
             raise TypeError("malformed message, must have at least %i elements"%minlen)
         header = self.unpack(msg_list[1])
-        message['header'] = header
+        message['header'] = extract_dates(header)
         message['msg_id'] = header['msg_id']
         message['msg_type'] = header['msg_type']
-        message['parent_header'] = self.unpack(msg_list[2])
+        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
         message['metadata'] = self.unpack(msg_list[3])
         if content:
             message['content'] = self.unpack(msg_list[4])
@@ -13,6 +13,8 @@
 
 import os
 import uuid
+from datetime import datetime
+
 import zmq
 
 from zmq.tests import BaseZMQTestCase
@@ -20,6 +22,10 @@ from zmq.eventloop.zmqstream import ZMQStream
 
 from IPython.kernel.zmq import session as ss
 
+from IPython.testing.decorators import skipif, module_not_available
+from IPython.utils.py3compat import string_types
+from IPython.utils import jsonutil
+
 def _bad_packer(obj):
     raise TypeError("I don't work")
 
@@ -155,24 +161,6 @@ class TestSession(SessionTestCase):
         t.wait(1) # this will raise
 
 
-    # def test_rekey(self):
-    #     """rekeying dict around json str keys"""
-    #     d = {'0': uuid.uuid4(), 0:uuid.uuid4()}
-    #     self.assertRaises(KeyError, ss.rekey, d)
-    #
-    #     d = {'0': uuid.uuid4(), 1:uuid.uuid4(), 'asdf':uuid.uuid4()}
-    #     d2 = {0:d['0'],1:d[1],'asdf':d['asdf']}
-    #     rd = ss.rekey(d)
-    #     self.assertEqual(d2,rd)
-    #
-    #     d = {'1.5':uuid.uuid4(),'1':uuid.uuid4()}
-    #     d2 = {1.5:d['1.5'],1:d['1']}
-    #     rd = ss.rekey(d)
-    #     self.assertEqual(d2,rd)
-    #
-    #     d = {'1.0':uuid.uuid4(),'1':uuid.uuid4()}
-    #     self.assertRaises(KeyError, ss.rekey, d)
-    #
     def test_unique_msg_ids(self):
         """test that messages receive unique ids"""
         ids = set()
@@ -267,5 +255,35 @@ class TestSession(SessionTestCase):
 
     def test_bad_roundtrip(self):
         with self.assertRaises(ValueError):
             session = ss.Session(unpack=lambda b: 5)
+
+    def _datetime_test(self, session):
+        content = dict(t=datetime.now())
+        metadata = dict(t=datetime.now())
+        p = session.msg('msg')
+        msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
+        smsg = session.serialize(msg)
+        msg2 = session.unserialize(session.feed_identities(smsg)[1])
+        assert isinstance(msg2['header']['date'], datetime)
+        self.assertEqual(msg['header'], msg2['header'])
+        self.assertEqual(msg['parent_header'], msg2['parent_header'])
+        self.assertEqual(msg['parent_header'], msg2['parent_header'])
+        assert isinstance(msg['content']['t'], datetime)
+        assert isinstance(msg['metadata']['t'], datetime)
+        assert isinstance(msg2['content']['t'], string_types)
+        assert isinstance(msg2['metadata']['t'], string_types)
+        self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
+        self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
+
+    def test_datetimes(self):
+        self._datetime_test(self.session)
+
+    def test_datetimes_pickle(self):
+        session = ss.Session(packer='pickle')
+        self._datetime_test(session)
+
+    @skipif(module_not_available('msgpack'))
+    def test_datetimes_msgpack(self):
+        session = ss.Session(packer='msgpack.packb', unpacker='msgpack.unpackb')
+        self._datetime_test(session)
 
@@ -37,7 +37,7 @@ from IPython.core.profiledir import ProfileDir, ProfileDirError
 
 from IPython.utils.capture import RichOutput
 from IPython.utils.coloransi import TermColors
-from IPython.utils.jsonutil import rekey
+from IPython.utils.jsonutil import rekey, extract_dates, parse_date
 from IPython.utils.localinterfaces import localhost, is_local_ip
 from IPython.utils.path import get_ipython_dir
 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
@@ -675,7 +675,7 @@ class Client(HasTraits):
         if 'date' in parent:
             md['submitted'] = parent['date']
         if 'started' in msg_meta:
-            md['started'] = msg_meta['started']
+            md['started'] = parse_date(msg_meta['started'])
         if 'date' in header:
             md['completed'] = header['date']
         return md
@@ -1564,8 +1564,8 @@ class Client(HasTraits):
         for msg_id in sorted(theids):
             if msg_id in content['completed']:
                 rec = content[msg_id]
-                parent = rec['header']
-                header = rec['result_header']
+                parent = extract_dates(rec['header'])
+                header = extract_dates(rec['result_header'])
                 rcontent = rec['result_content']
                 iodict = rec['io']
                 if isinstance(rcontent, str):
@@ -1580,7 +1580,7 @@ class Client(HasTraits):
                 )
                 md.update(self._extract_metadata(md_msg))
                 if rec.get('received'):
-                    md['received'] = rec['received']
+                    md['received'] = parse_date(rec['received'])
                 md.update(iodict)
 
                 if rcontent['status'] == 'ok':
@@ -1842,6 +1842,13 @@ class Client(HasTraits):
         has_bufs = buffer_lens is not None
         has_rbufs = result_buffer_lens is not None
         for i,rec in enumerate(records):
+            # unpack datetime objects
+            for hkey in ('header', 'result_header'):
+                if hkey in rec:
+                    rec[hkey] = extract_dates(rec[hkey])
+            for dtkey in ('submitted', 'started', 'completed', 'received'):
+                if dtkey in rec:
+                    rec[dtkey] = parse_date(rec[dtkey])
             # relink buffers
             if has_bufs:
                 blen = buffer_lens[i]
@@ -30,6 +30,7 @@ from zmq.eventloop.zmqstream import ZMQStream
 
 # internal:
 from IPython.utils.importstring import import_item
+from IPython.utils.jsonutil import extract_dates
 from IPython.utils.localinterfaces import localhost
 from IPython.utils.py3compat import cast_bytes, unicode_type, iteritems
 from IPython.utils.traitlets import (
@@ -1385,7 +1386,7 @@ class Hub(SessionFactory):
     def db_query(self, client_id, msg):
         """Perform a raw query on the task record database."""
         content = msg['content']
-        query = content.get('query', {})
+        query = extract_dates(content.get('query', {}))
        keys = content.get('keys', None)
         buffers = []
         empty = list()
@@ -33,8 +33,8 @@ next_attr_name = '__next__' if py3compat.PY3 else 'next'
 #-----------------------------------------------------------------------------
 
 # timestamp formats
-ISO8601="%Y-%m-%dT%H:%M:%S.%f"
-ISO8601_PAT=re.compile(r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z?([\+\-]\d{2}:?\d{2})?$")
+ISO8601 = "%Y-%m-%dT%H:%M:%S.%f"
+ISO8601_PAT=re.compile(r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{1,6})Z?([\+\-]\d{2}:?\d{2})?$")
 
 #-----------------------------------------------------------------------------
 # Classes and functions
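
The practical effect of the tightened pattern (exercised by test_parse_ms_precision further down): only 1 to 6 fractional-second digits are accepted, matching what strptime's %f can parse, while an optional Z or UTC-offset suffix is still tolerated and later dropped. For example:

import re
ISO8601_PAT = re.compile(r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{1,6})Z?([\+\-]\d{2}:?\d{2})?$")

assert ISO8601_PAT.match('2013-07-03T16:34:52.123Z')
assert ISO8601_PAT.match('2013-07-03T16:34:52.123456+02:00')
assert ISO8601_PAT.match('2013-07-03T16:34:52.1234567') is None   # more than 6 digits no longer matches
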
@@ -62,22 +62,34 @@ def rekey(dikt):
             dikt[nk] = dikt.pop(k)
     return dikt
 
+def parse_date(s):
+    """parse an ISO8601 date string
+
+    If it is None or not a valid ISO8601 timestamp,
+    it will be returned unmodified.
+    Otherwise, it will return a datetime object.
+    """
+    if s is None:
+        return s
+    m = ISO8601_PAT.match(s)
+    if m:
+        # FIXME: add actual timezone support
+        # this just drops the timezone info
+        notz = m.groups()[0]
+        return datetime.strptime(notz, ISO8601)
+    return s
 
 def extract_dates(obj):
     """extract ISO8601 dates from unpacked JSON"""
     if isinstance(obj, dict):
-        obj = dict(obj) # don't clobber
+        new_obj = {} # don't clobber
         for k,v in iteritems(obj):
-            obj[k] = extract_dates(v)
+            new_obj[k] = extract_dates(v)
+        obj = new_obj
     elif isinstance(obj, (list, tuple)):
         obj = [ extract_dates(o) for o in obj ]
     elif isinstance(obj, string_types):
-        m = ISO8601_PAT.match(obj)
-        if m:
-            # FIXME: add actual timezone support
-            # this just drops the timezone info
-            notz = m.groups()[0]
-            obj = datetime.strptime(notz, ISO8601)
+        obj = parse_date(obj)
     return obj
 
 def squash_dates(obj):
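
Taken with the Session and Client changes above, the model is: the wire always carries ISO8601 strings, and callers opt in to conversion at the edges. A short usage sketch of the two helpers (assuming this branch of IPython is importable):

from IPython.utils.jsonutil import parse_date, extract_dates

parse_date('2013-07-03T16:34:52.123456')
# -> datetime.datetime(2013, 7, 3, 16, 34, 52, 123456)
parse_date('not a timestamp')
# -> 'not a timestamp', returned unmodified

header = {'msg_id': 'abc', 'date': '2013-07-03T16:34:52.123456'}   # illustrative values
extract_dates(header)
# -> {'msg_id': 'abc', 'date': datetime.datetime(2013, 7, 3, 16, 34, 52, 123456)}
# the input dict itself is left untouched, thanks to the new_obj copy above
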
@@ -113,6 +113,18 @@ def test_extract_dates():
     nt.assert_true(isinstance(dt, datetime.datetime))
     nt.assert_equal(dt, ref)
 
+def test_parse_ms_precision():
+    base = '2013-07-03T16:34:52.'
+    digits = '1234567890'
+
+    for i in range(len(digits)):
+        ts = base + digits[:i]
+        parsed = jsonutil.parse_date(ts)
+        if i >= 1 and i <= 6:
+            assert isinstance(parsed, datetime.datetime)
+        else:
+            assert isinstance(parsed, str)
+
 def test_date_default():
     data = dict(today=datetime.datetime.now(), utcnow=tz.utcnow())
     jsondata = json.dumps(data, default=jsonutil.date_default)