pyerr -> error
MinRK
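The change renames the IOPub error message type from `pyerr` to `error`; the output area now matches on `error` and maps it back to the nbformat v3 `pyerr` output type (and `append_pyerr` becomes `append_error`). A minimal sketch of that mapping, assuming the message fields shown in the diff below (the helper name is hypothetical and not part of this commit):

// Hypothetical helper: build the nbformat v3 output dict from an IOPub
// "error" message's content. nbformat still calls this output "pyerr".
var error_content_to_output = function (content) {
    return {
        output_type : "pyerr",           // nbformat has not been updated yet
        ename : content.ename,           // exception class name
        evalue : content.evalue,         // exception value / message
        traceback : content.traceback    // list of formatted traceback lines
    };
};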
@@ -1,962 +1,965 b''
1 // Copyright (c) IPython Development Team.
1 // Copyright (c) IPython Development Team.
2 // Distributed under the terms of the Modified BSD License.
2 // Distributed under the terms of the Modified BSD License.
3
3
4 //============================================================================
4 //============================================================================
5 // OutputArea
5 // OutputArea
6 //============================================================================
6 //============================================================================
7
7
8 /**
8 /**
9 * @module IPython
9 * @module IPython
10 * @namespace IPython
10 * @namespace IPython
11 * @submodule OutputArea
11 * @submodule OutputArea
12 */
12 */
13 var IPython = (function (IPython) {
13 var IPython = (function (IPython) {
14 "use strict";
14 "use strict";
15
15
16 var utils = IPython.utils;
16 var utils = IPython.utils;
17
17
18 /**
18 /**
19 * @class OutputArea
19 * @class OutputArea
20 *
20 *
21 * @constructor
21 * @constructor
22 */
22 */
23
23
24 var OutputArea = function (selector, prompt_area) {
24 var OutputArea = function (selector, prompt_area) {
25 this.selector = selector;
25 this.selector = selector;
26 this.wrapper = $(selector);
26 this.wrapper = $(selector);
27 this.outputs = [];
27 this.outputs = [];
28 this.collapsed = false;
28 this.collapsed = false;
29 this.scrolled = false;
29 this.scrolled = false;
30 this.trusted = true;
30 this.trusted = true;
31 this.clear_queued = null;
31 this.clear_queued = null;
32 if (prompt_area === undefined) {
32 if (prompt_area === undefined) {
33 this.prompt_area = true;
33 this.prompt_area = true;
34 } else {
34 } else {
35 this.prompt_area = prompt_area;
35 this.prompt_area = prompt_area;
36 }
36 }
37 this.create_elements();
37 this.create_elements();
38 this.style();
38 this.style();
39 this.bind_events();
39 this.bind_events();
40 };
40 };
41
41
42
42
43 /**
43 /**
44 * Class prototypes
44 * Class prototypes
45 **/
45 **/
46
46
47 OutputArea.prototype.create_elements = function () {
47 OutputArea.prototype.create_elements = function () {
48 this.element = $("<div/>");
48 this.element = $("<div/>");
49 this.collapse_button = $("<div/>");
49 this.collapse_button = $("<div/>");
50 this.prompt_overlay = $("<div/>");
50 this.prompt_overlay = $("<div/>");
51 this.wrapper.append(this.prompt_overlay);
51 this.wrapper.append(this.prompt_overlay);
52 this.wrapper.append(this.element);
52 this.wrapper.append(this.element);
53 this.wrapper.append(this.collapse_button);
53 this.wrapper.append(this.collapse_button);
54 };
54 };
55
55
56
56
57 OutputArea.prototype.style = function () {
57 OutputArea.prototype.style = function () {
58 this.collapse_button.hide();
58 this.collapse_button.hide();
59 this.prompt_overlay.hide();
59 this.prompt_overlay.hide();
60
60
61 this.wrapper.addClass('output_wrapper');
61 this.wrapper.addClass('output_wrapper');
62 this.element.addClass('output');
62 this.element.addClass('output');
63
63
64 this.collapse_button.addClass("btn output_collapsed");
64 this.collapse_button.addClass("btn output_collapsed");
65 this.collapse_button.attr('title', 'click to expand output');
65 this.collapse_button.attr('title', 'click to expand output');
66 this.collapse_button.text('. . .');
66 this.collapse_button.text('. . .');
67
67
68 this.prompt_overlay.addClass('out_prompt_overlay prompt');
68 this.prompt_overlay.addClass('out_prompt_overlay prompt');
69 this.prompt_overlay.attr('title', 'click to expand output; double click to hide output');
69 this.prompt_overlay.attr('title', 'click to expand output; double click to hide output');
70
70
71 this.collapse();
71 this.collapse();
72 };
72 };
73
73
74 /**
74 /**
75 * Should the OutputArea scroll?
75 * Should the OutputArea scroll?
76 * Returns whether the height (in lines) exceeds a threshold.
76 * Returns whether the height (in lines) exceeds a threshold.
77 *
77 *
78 * @private
78 * @private
79 * @method _should_scroll
79 * @method _should_scroll
80 * @param [lines=100]{Integer}
80 * @param [lines=100]{Integer}
81 * @return {Bool}
81 * @return {Bool}
82 *
82 *
83 */
83 */
84 OutputArea.prototype._should_scroll = function (lines) {
84 OutputArea.prototype._should_scroll = function (lines) {
85 if (lines <= 0) { return; }
85 if (lines <= 0) { return; }
86 if (!lines) {
86 if (!lines) {
87 lines = 100;
87 lines = 100;
88 }
88 }
89 // line-height from http://stackoverflow.com/questions/1185151
89 // line-height from http://stackoverflow.com/questions/1185151
90 var fontSize = this.element.css('font-size');
90 var fontSize = this.element.css('font-size');
91 var lineHeight = Math.floor(parseInt(fontSize.replace('px','')) * 1.5);
91 var lineHeight = Math.floor(parseInt(fontSize.replace('px','')) * 1.5);
92
92
93 return (this.element.height() > lines * lineHeight);
93 return (this.element.height() > lines * lineHeight);
94 };
94 };
95
95
96
96
97 OutputArea.prototype.bind_events = function () {
97 OutputArea.prototype.bind_events = function () {
98 var that = this;
98 var that = this;
99 this.prompt_overlay.dblclick(function () { that.toggle_output(); });
99 this.prompt_overlay.dblclick(function () { that.toggle_output(); });
100 this.prompt_overlay.click(function () { that.toggle_scroll(); });
100 this.prompt_overlay.click(function () { that.toggle_scroll(); });
101
101
102 this.element.resize(function () {
102 this.element.resize(function () {
103 // FIXME: Firefox on Linux misbehaves, so automatic scrolling is disabled
103 // FIXME: Firefox on Linux misbehaves, so automatic scrolling is disabled
104 if ( IPython.utils.browser[0] === "Firefox" ) {
104 if ( IPython.utils.browser[0] === "Firefox" ) {
105 return;
105 return;
106 }
106 }
107 // maybe scroll output,
107 // maybe scroll output,
108 // if it's grown large enough and hasn't already been scrolled.
108 // if it's grown large enough and hasn't already been scrolled.
109 if ( !that.scrolled && that._should_scroll(OutputArea.auto_scroll_threshold)) {
109 if ( !that.scrolled && that._should_scroll(OutputArea.auto_scroll_threshold)) {
110 that.scroll_area();
110 that.scroll_area();
111 }
111 }
112 });
112 });
113 this.collapse_button.click(function () {
113 this.collapse_button.click(function () {
114 that.expand();
114 that.expand();
115 });
115 });
116 };
116 };
117
117
118
118
119 OutputArea.prototype.collapse = function () {
119 OutputArea.prototype.collapse = function () {
120 if (!this.collapsed) {
120 if (!this.collapsed) {
121 this.element.hide();
121 this.element.hide();
122 this.prompt_overlay.hide();
122 this.prompt_overlay.hide();
123 if (this.element.html()){
123 if (this.element.html()){
124 this.collapse_button.show();
124 this.collapse_button.show();
125 }
125 }
126 this.collapsed = true;
126 this.collapsed = true;
127 }
127 }
128 };
128 };
129
129
130
130
131 OutputArea.prototype.expand = function () {
131 OutputArea.prototype.expand = function () {
132 if (this.collapsed) {
132 if (this.collapsed) {
133 this.collapse_button.hide();
133 this.collapse_button.hide();
134 this.element.show();
134 this.element.show();
135 this.prompt_overlay.show();
135 this.prompt_overlay.show();
136 this.collapsed = false;
136 this.collapsed = false;
137 }
137 }
138 };
138 };
139
139
140
140
141 OutputArea.prototype.toggle_output = function () {
141 OutputArea.prototype.toggle_output = function () {
142 if (this.collapsed) {
142 if (this.collapsed) {
143 this.expand();
143 this.expand();
144 } else {
144 } else {
145 this.collapse();
145 this.collapse();
146 }
146 }
147 };
147 };
148
148
149
149
150 OutputArea.prototype.scroll_area = function () {
150 OutputArea.prototype.scroll_area = function () {
151 this.element.addClass('output_scroll');
151 this.element.addClass('output_scroll');
152 this.prompt_overlay.attr('title', 'click to unscroll output; double click to hide');
152 this.prompt_overlay.attr('title', 'click to unscroll output; double click to hide');
153 this.scrolled = true;
153 this.scrolled = true;
154 };
154 };
155
155
156
156
157 OutputArea.prototype.unscroll_area = function () {
157 OutputArea.prototype.unscroll_area = function () {
158 this.element.removeClass('output_scroll');
158 this.element.removeClass('output_scroll');
159 this.prompt_overlay.attr('title', 'click to scroll output; double click to hide');
159 this.prompt_overlay.attr('title', 'click to scroll output; double click to hide');
160 this.scrolled = false;
160 this.scrolled = false;
161 };
161 };
162
162
163 /**
163 /**
164 *
164 *
165 * Scroll the OutputArea if its height exceeds a threshold (in lines).
165 * Scroll the OutputArea if its height exceeds a threshold (in lines).
166 *
166 *
167 * Threshold is a maximum number of lines. If unspecified, defaults to
167 * Threshold is a maximum number of lines. If unspecified, defaults to
168 * OutputArea.minimum_scroll_threshold.
168 * OutputArea.minimum_scroll_threshold.
169 *
169 *
170 * Negative threshold will prevent the OutputArea from ever scrolling.
170 * Negative threshold will prevent the OutputArea from ever scrolling.
171 *
171 *
172 * @method scroll_if_long
172 * @method scroll_if_long
173 *
173 *
174 * @param [lines=20]{Number} Defaults to 20 if not set;
174 * @param [lines=20]{Number} Defaults to 20 if not set;
175 * behavior is undefined for a value of `0`.
175 * behavior is undefined for a value of `0`.
176 *
176 *
177 **/
177 **/
178 OutputArea.prototype.scroll_if_long = function (lines) {
178 OutputArea.prototype.scroll_if_long = function (lines) {
179 var n = lines || OutputArea.minimum_scroll_threshold;
179 var n = lines || OutputArea.minimum_scroll_threshold;
180 if(n <= 0){
180 if(n <= 0){
181 return
181 return
182 }
182 }
183
183
184 if (this._should_scroll(n)) {
184 if (this._should_scroll(n)) {
185 // only allow scrolling long-enough output
185 // only allow scrolling long-enough output
186 this.scroll_area();
186 this.scroll_area();
187 }
187 }
188 };
188 };
189
189
190
190
191 OutputArea.prototype.toggle_scroll = function () {
191 OutputArea.prototype.toggle_scroll = function () {
192 if (this.scrolled) {
192 if (this.scrolled) {
193 this.unscroll_area();
193 this.unscroll_area();
194 } else {
194 } else {
195 // only allow scrolling long-enough output
195 // only allow scrolling long-enough output
196 this.scroll_if_long();
196 this.scroll_if_long();
197 }
197 }
198 };
198 };
199
199
200
200
201 // typeset with MathJax if MathJax is available
201 // typeset with MathJax if MathJax is available
202 OutputArea.prototype.typeset = function () {
202 OutputArea.prototype.typeset = function () {
203 if (window.MathJax){
203 if (window.MathJax){
204 MathJax.Hub.Queue(["Typeset",MathJax.Hub]);
204 MathJax.Hub.Queue(["Typeset",MathJax.Hub]);
205 }
205 }
206 };
206 };
207
207
208
208
209 OutputArea.prototype.handle_output = function (msg) {
209 OutputArea.prototype.handle_output = function (msg) {
210 var json = {};
210 var json = {};
211 var msg_type = json.output_type = msg.header.msg_type;
211 var msg_type = json.output_type = msg.header.msg_type;
212 var content = msg.content;
212 var content = msg.content;
213 if (msg_type === "stream") {
213 if (msg_type === "stream") {
214 json.text = content.data;
214 json.text = content.data;
215 json.stream = content.name;
215 json.stream = content.name;
216 } else if (msg_type === "display_data") {
216 } else if (msg_type === "display_data") {
217 json = content.data;
217 json = content.data;
218 json.output_type = msg_type;
218 json.output_type = msg_type;
219 json.metadata = content.metadata;
219 json.metadata = content.metadata;
220 } else if (msg_type === "execute_result") {
220 } else if (msg_type === "execute_result") {
221 json = content.data;
221 json = content.data;
222 // pyout message has been renamed to execute_result,
222 // pyout message has been renamed to execute_result,
223 // but the nbformat has not been updated,
223 // but the nbformat has not been updated,
224 // so transform back to pyout for json.
224 // so transform back to pyout for json.
225 json.output_type = "pyout";
225 json.output_type = "pyout";
226 json.metadata = content.metadata;
226 json.metadata = content.metadata;
227 json.prompt_number = content.execution_count;
227 json.prompt_number = content.execution_count;
228 } else if (msg_type === "pyerr") {
228 } else if (msg_type === "error") {
229 json.ename = content.ename;
229 // pyerr message has been renamed to error,
230 json.evalue = content.evalue;
230 // but the nbformat has not been updated,
231 json.traceback = content.traceback;
231 // so transform back to pyerr for json.
232 json.output_type = "pyerr";
233 json = this.convert_mime_types(json, content.data);
234 json.metadata = this.convert_mime_types({}, content.metadata);
232 }
235 }
233 this.append_output(json);
236 this.append_output(json);
234 };
237 };
235
238
236
239
237 OutputArea.prototype.rename_keys = function (data, key_map) {
240 OutputArea.prototype.rename_keys = function (data, key_map) {
238 var remapped = {};
241 var remapped = {};
239 for (var key in data) {
242 for (var key in data) {
240 var new_key = key_map[key] || key;
243 var new_key = key_map[key] || key;
241 remapped[new_key] = data[key];
244 remapped[new_key] = data[key];
242 }
245 }
243 return remapped;
246 return remapped;
244 };
247 };
245
248
246
249
247 OutputArea.output_types = [
250 OutputArea.output_types = [
248 'application/javascript',
251 'application/javascript',
249 'text/html',
252 'text/html',
250 'text/markdown',
253 'text/markdown',
251 'text/latex',
254 'text/latex',
252 'image/svg+xml',
255 'image/svg+xml',
253 'image/png',
256 'image/png',
254 'image/jpeg',
257 'image/jpeg',
255 'application/pdf',
258 'application/pdf',
256 'text/plain'
259 'text/plain'
257 ];
260 ];
258
261
259 OutputArea.prototype.validate_output = function (json) {
262 OutputArea.prototype.validate_output = function (json) {
260 // scrub invalid outputs
263 // scrub invalid outputs
261 // TODO: right now everything is a string, but JSON really shouldn't be.
264 // TODO: right now everything is a string, but JSON really shouldn't be.
262 // nbformat 4 will fix that.
265 // nbformat 4 will fix that.
263 $.map(OutputArea.output_types, function(key){
266 $.map(OutputArea.output_types, function(key){
264 if (json[key] !== undefined && typeof json[key] !== 'string') {
267 if (json[key] !== undefined && typeof json[key] !== 'string') {
265 console.log("Invalid type for " + key, json[key]);
268 console.log("Invalid type for " + key, json[key]);
266 delete json[key];
269 delete json[key];
267 }
270 }
268 });
271 });
269 return json;
272 return json;
270 };
273 };
271
274
272 OutputArea.prototype.append_output = function (json) {
275 OutputArea.prototype.append_output = function (json) {
273 this.expand();
276 this.expand();
274
277
275 // validate output data types
278 // validate output data types
276 json = this.validate_output(json);
279 json = this.validate_output(json);
277
280
278 // Clear the output if clear is queued.
281 // Clear the output if clear is queued.
279 var needs_height_reset = false;
282 var needs_height_reset = false;
280 if (this.clear_queued) {
283 if (this.clear_queued) {
281 this.clear_output(false);
284 this.clear_output(false);
282 needs_height_reset = true;
285 needs_height_reset = true;
283 }
286 }
284
287
285 if (json.output_type === 'pyout') {
288 if (json.output_type === 'pyout') {
286 this.append_execute_result(json);
289 this.append_execute_result(json);
287 } else if (json.output_type === 'pyerr') {
290 } else if (json.output_type === 'pyerr') {
288 this.append_pyerr(json);
291 this.append_error(json);
289 } else if (json.output_type === 'stream') {
292 } else if (json.output_type === 'stream') {
290 this.append_stream(json);
293 this.append_stream(json);
291 }
294 }
292
295
293 // We must release the animation fixed height in a callback since Gecko
296 // We must release the animation fixed height in a callback since Gecko
294 // (Firefox) doesn't render the image immediately as the data is
297 // (Firefox) doesn't render the image immediately as the data is
295 // available.
298 // available.
296 var that = this;
299 var that = this;
297 var handle_appended = function ($el) {
300 var handle_appended = function ($el) {
298 // Only reset the height to automatic if the height is currently
301 // Only reset the height to automatic if the height is currently
299 // fixed (done by wait=True flag on clear_output).
302 // fixed (done by wait=True flag on clear_output).
300 if (needs_height_reset) {
303 if (needs_height_reset) {
301 that.element.height('');
304 that.element.height('');
302 }
305 }
303 that.element.trigger('resize');
306 that.element.trigger('resize');
304 };
307 };
305 if (json.output_type === 'display_data') {
308 if (json.output_type === 'display_data') {
306 this.append_display_data(json, handle_appended);
309 this.append_display_data(json, handle_appended);
307 } else {
310 } else {
308 handle_appended();
311 handle_appended();
309 }
312 }
310
313
311 this.outputs.push(json);
314 this.outputs.push(json);
312 };
315 };
313
316
314
317
315 OutputArea.prototype.create_output_area = function () {
318 OutputArea.prototype.create_output_area = function () {
316 var oa = $("<div/>").addClass("output_area");
319 var oa = $("<div/>").addClass("output_area");
317 if (this.prompt_area) {
320 if (this.prompt_area) {
318 oa.append($('<div/>').addClass('prompt'));
321 oa.append($('<div/>').addClass('prompt'));
319 }
322 }
320 return oa;
323 return oa;
321 };
324 };
322
325
323
326
324 function _get_metadata_key(metadata, key, mime) {
327 function _get_metadata_key(metadata, key, mime) {
325 var mime_md = metadata[mime];
328 var mime_md = metadata[mime];
326 // mime-specific higher priority
329 // mime-specific higher priority
327 if (mime_md && mime_md[key] !== undefined) {
330 if (mime_md && mime_md[key] !== undefined) {
328 return mime_md[key];
331 return mime_md[key];
329 }
332 }
330 // fallback on global
333 // fallback on global
331 return metadata[key];
334 return metadata[key];
332 }
335 }
333
336
334 OutputArea.prototype.create_output_subarea = function(md, classes, mime) {
337 OutputArea.prototype.create_output_subarea = function(md, classes, mime) {
335 var subarea = $('<div/>').addClass('output_subarea').addClass(classes);
338 var subarea = $('<div/>').addClass('output_subarea').addClass(classes);
336 if (_get_metadata_key(md, 'isolated', mime)) {
339 if (_get_metadata_key(md, 'isolated', mime)) {
337 // Create an iframe to isolate the subarea from the rest of the
340 // Create an iframe to isolate the subarea from the rest of the
338 // document
341 // document
339 var iframe = $('<iframe/>').addClass('box-flex1');
342 var iframe = $('<iframe/>').addClass('box-flex1');
340 iframe.css({'height':1, 'width':'100%', 'display':'block'});
343 iframe.css({'height':1, 'width':'100%', 'display':'block'});
341 iframe.attr('frameborder', 0);
344 iframe.attr('frameborder', 0);
342 iframe.attr('scrolling', 'auto');
345 iframe.attr('scrolling', 'auto');
343
346
344 // Once the iframe is loaded, the subarea is dynamically inserted
347 // Once the iframe is loaded, the subarea is dynamically inserted
345 iframe.on('load', function() {
348 iframe.on('load', function() {
346 // Workaround needed by Firefox, to properly render svg inside
349 // Workaround needed by Firefox, to properly render svg inside
347 // iframes, see http://stackoverflow.com/questions/10177190/
350 // iframes, see http://stackoverflow.com/questions/10177190/
348 // svg-dynamically-added-to-iframe-does-not-render-correctly
351 // svg-dynamically-added-to-iframe-does-not-render-correctly
349 this.contentDocument.open();
352 this.contentDocument.open();
350
353
351 // Insert the subarea into the iframe
354 // Insert the subarea into the iframe
352 // We must directly write the html. When using jQuery's append
355 // We must directly write the html. When using jQuery's append
353 // method, javascript is evaluated in the parent document and
356 // method, javascript is evaluated in the parent document and
354 // not in the iframe document. At this point, subarea doesn't
357 // not in the iframe document. At this point, subarea doesn't
355 // contain any user content.
358 // contain any user content.
356 this.contentDocument.write(subarea.html());
359 this.contentDocument.write(subarea.html());
357
360
358 this.contentDocument.close();
361 this.contentDocument.close();
359
362
360 var body = this.contentDocument.body;
363 var body = this.contentDocument.body;
361 // Adjust the iframe height automatically
364 // Adjust the iframe height automatically
362 iframe.height(body.scrollHeight + 'px');
365 iframe.height(body.scrollHeight + 'px');
363 });
366 });
364
367
365 // Elements should be appended to the inner subarea and not to the
368 // Elements should be appended to the inner subarea and not to the
366 // iframe
369 // iframe
367 iframe.append = function(that) {
370 iframe.append = function(that) {
368 subarea.append(that);
371 subarea.append(that);
369 };
372 };
370
373
371 return iframe;
374 return iframe;
372 } else {
375 } else {
373 return subarea;
376 return subarea;
374 }
377 }
375 }
378 }
376
379
377
380
378 OutputArea.prototype._append_javascript_error = function (err, element) {
381 OutputArea.prototype._append_javascript_error = function (err, element) {
379 // display a message when a javascript error occurs in display output
382 // display a message when a javascript error occurs in display output
380 var msg = "Javascript error adding output!"
383 var msg = "Javascript error adding output!"
381 if ( element === undefined ) return;
384 if ( element === undefined ) return;
382 element
385 element
383 .append($('<div/>').text(msg).addClass('js-error'))
386 .append($('<div/>').text(msg).addClass('js-error'))
384 .append($('<div/>').text(err.toString()).addClass('js-error'))
387 .append($('<div/>').text(err.toString()).addClass('js-error'))
385 .append($('<div/>').text('See your browser Javascript console for more details.').addClass('js-error'));
388 .append($('<div/>').text('See your browser Javascript console for more details.').addClass('js-error'));
386 };
389 };
387
390
388 OutputArea.prototype._safe_append = function (toinsert) {
391 OutputArea.prototype._safe_append = function (toinsert) {
389 // safely append an item to the document
392 // safely append an item to the document
390 // this is an object created by user code,
393 // this is an object created by user code,
391 // and may have errors, which should not be raised
394 // and may have errors, which should not be raised
392 // under any circumstances.
395 // under any circumstances.
393 try {
396 try {
394 this.element.append(toinsert);
397 this.element.append(toinsert);
395 } catch(err) {
398 } catch(err) {
396 console.log(err);
399 console.log(err);
397 // Create an actual output_area and output_subarea, which creates
400 // Create an actual output_area and output_subarea, which creates
398 // the prompt area and the proper indentation.
401 // the prompt area and the proper indentation.
399 var toinsert = this.create_output_area();
402 var toinsert = this.create_output_area();
400 var subarea = $('<div/>').addClass('output_subarea');
403 var subarea = $('<div/>').addClass('output_subarea');
401 toinsert.append(subarea);
404 toinsert.append(subarea);
402 this._append_javascript_error(err, subarea);
405 this._append_javascript_error(err, subarea);
403 this.element.append(toinsert);
406 this.element.append(toinsert);
404 }
407 }
405 };
408 };
406
409
407
410
408 OutputArea.prototype.append_execute_result = function (json) {
411 OutputArea.prototype.append_execute_result = function (json) {
409 var n = json.prompt_number || ' ';
412 var n = json.prompt_number || ' ';
410 var toinsert = this.create_output_area();
413 var toinsert = this.create_output_area();
411 if (this.prompt_area) {
414 if (this.prompt_area) {
412 toinsert.find('div.prompt').addClass('output_prompt').text('Out[' + n + ']:');
415 toinsert.find('div.prompt').addClass('output_prompt').text('Out[' + n + ']:');
413 }
416 }
414 var inserted = this.append_mime_type(json, toinsert);
417 var inserted = this.append_mime_type(json, toinsert);
415 if (inserted) {
418 if (inserted) {
416 inserted.addClass('output_pyout');
419 inserted.addClass('output_pyout');
417 }
420 }
418 this._safe_append(toinsert);
421 this._safe_append(toinsert);
419 // If we just output latex, typeset it.
422 // If we just output latex, typeset it.
420 if ((json['text/latex'] !== undefined) ||
423 if ((json['text/latex'] !== undefined) ||
421 (json['text/html'] !== undefined) ||
424 (json['text/html'] !== undefined) ||
422 (json['text/markdown'] !== undefined)) {
425 (json['text/markdown'] !== undefined)) {
423 this.typeset();
426 this.typeset();
424 }
427 }
425 };
428 };
426
429
427
430
428 OutputArea.prototype.append_pyerr = function (json) {
431 OutputArea.prototype.append_error = function (json) {
429 var tb = json.traceback;
432 var tb = json.traceback;
430 if (tb !== undefined && tb.length > 0) {
433 if (tb !== undefined && tb.length > 0) {
431 var s = '';
434 var s = '';
432 var len = tb.length;
435 var len = tb.length;
433 for (var i=0; i<len; i++) {
436 for (var i=0; i<len; i++) {
434 s = s + tb[i] + '\n';
437 s = s + tb[i] + '\n';
435 }
438 }
436 s = s + '\n';
439 s = s + '\n';
437 var toinsert = this.create_output_area();
440 var toinsert = this.create_output_area();
438 var append_text = OutputArea.append_map['text/plain'];
441 var append_text = OutputArea.append_map['text/plain'];
439 if (append_text) {
442 if (append_text) {
440 append_text.apply(this, [s, {}, toinsert]).addClass('output_pyerr');
443 append_text.apply(this, [s, {}, toinsert]).addClass('output_pyerr');
441 }
444 }
442 this._safe_append(toinsert);
445 this._safe_append(toinsert);
443 }
446 }
444 };
447 };
445
448
446
449
447 OutputArea.prototype.append_stream = function (json) {
450 OutputArea.prototype.append_stream = function (json) {
448 // temporary fix: if stream undefined (json file written prior to this patch),
451 // temporary fix: if stream undefined (json file written prior to this patch),
449 // default to most likely stdout:
452 // default to most likely stdout:
450 if (json.stream === undefined){
453 if (json.stream === undefined){
451 json.stream = 'stdout';
454 json.stream = 'stdout';
452 }
455 }
453 var text = json.text;
456 var text = json.text;
454 var subclass = "output_"+json.stream;
457 var subclass = "output_"+json.stream;
455 if (this.outputs.length > 0){
458 if (this.outputs.length > 0){
456 // have at least one output to consider
459 // have at least one output to consider
457 var last = this.outputs[this.outputs.length-1];
460 var last = this.outputs[this.outputs.length-1];
458 if (last.output_type == 'stream' && json.stream == last.stream){
461 if (last.output_type == 'stream' && json.stream == last.stream){
459 // latest output was in the same stream,
462 // latest output was in the same stream,
460 // so append directly into its pre tag
463 // so append directly into its pre tag
461 // escape ANSI & HTML specials:
464 // escape ANSI & HTML specials:
462 var pre = this.element.find('div.'+subclass).last().find('pre');
465 var pre = this.element.find('div.'+subclass).last().find('pre');
463 var html = utils.fixCarriageReturn(
466 var html = utils.fixCarriageReturn(
464 pre.html() + utils.fixConsole(text));
467 pre.html() + utils.fixConsole(text));
465 // The only user content injected with this HTML call is
468 // The only user content injected with this HTML call is
466 // escaped by the fixConsole() method.
469 // escaped by the fixConsole() method.
467 pre.html(html);
470 pre.html(html);
468 return;
471 return;
469 }
472 }
470 }
473 }
471
474
472 if (!text.replace("\r", "")) {
475 if (!text.replace("\r", "")) {
473 // text is nothing (empty string, \r, etc.)
476 // text is nothing (empty string, \r, etc.)
474 // so don't append any elements, which might add undesirable space
477 // so don't append any elements, which might add undesirable space
475 return;
478 return;
476 }
479 }
477
480
478 // If we got here, attach a new div
481 // If we got here, attach a new div
479 var toinsert = this.create_output_area();
482 var toinsert = this.create_output_area();
480 var append_text = OutputArea.append_map['text/plain'];
483 var append_text = OutputArea.append_map['text/plain'];
481 if (append_text) {
484 if (append_text) {
482 append_text.apply(this, [text, {}, toinsert]).addClass("output_stream " + subclass);
485 append_text.apply(this, [text, {}, toinsert]).addClass("output_stream " + subclass);
483 }
486 }
484 this._safe_append(toinsert);
487 this._safe_append(toinsert);
485 };
488 };
486
489
487
490
488 OutputArea.prototype.append_display_data = function (json, handle_inserted) {
491 OutputArea.prototype.append_display_data = function (json, handle_inserted) {
489 var toinsert = this.create_output_area();
492 var toinsert = this.create_output_area();
490 if (this.append_mime_type(json, toinsert, handle_inserted)) {
493 if (this.append_mime_type(json, toinsert, handle_inserted)) {
491 this._safe_append(toinsert);
494 this._safe_append(toinsert);
492 // If we just output latex, typeset it.
495 // If we just output latex, typeset it.
493 if ((json['text/latex'] !== undefined) ||
496 if ((json['text/latex'] !== undefined) ||
494 (json['text/html'] !== undefined) ||
497 (json['text/html'] !== undefined) ||
495 (json['text/markdown'] !== undefined)) {
498 (json['text/markdown'] !== undefined)) {
496 this.typeset();
499 this.typeset();
497 }
500 }
498 }
501 }
499 };
502 };
500
503
501
504
502 OutputArea.safe_outputs = {
505 OutputArea.safe_outputs = {
503 'text/plain' : true,
506 'text/plain' : true,
504 'text/latex' : true,
507 'text/latex' : true,
505 'image/png' : true,
508 'image/png' : true,
506 'image/jpeg' : true
509 'image/jpeg' : true
507 };
510 };
508
511
509 OutputArea.prototype.append_mime_type = function (json, element, handle_inserted) {
512 OutputArea.prototype.append_mime_type = function (json, element, handle_inserted) {
510 for (var i=0; i < OutputArea.display_order.length; i++) {
513 for (var i=0; i < OutputArea.display_order.length; i++) {
511 var type = OutputArea.display_order[i];
514 var type = OutputArea.display_order[i];
512 var append = OutputArea.append_map[type];
515 var append = OutputArea.append_map[type];
513 if ((json[type] !== undefined) && append) {
516 if ((json[type] !== undefined) && append) {
514 var value = json[type];
517 var value = json[type];
515 if (!this.trusted && !OutputArea.safe_outputs[type]) {
518 if (!this.trusted && !OutputArea.safe_outputs[type]) {
516 // not trusted, sanitize HTML
519 // not trusted, sanitize HTML
517 if (type==='text/html' || type==='text/svg') {
520 if (type==='text/html' || type==='text/svg') {
518 value = IPython.security.sanitize_html(value);
521 value = IPython.security.sanitize_html(value);
519 } else {
522 } else {
520 // don't display if we don't know how to sanitize it
523 // don't display if we don't know how to sanitize it
521 console.log("Ignoring untrusted " + type + " output.");
524 console.log("Ignoring untrusted " + type + " output.");
522 continue;
525 continue;
523 }
526 }
524 }
527 }
525 var md = json.metadata || {};
528 var md = json.metadata || {};
526 var toinsert = append.apply(this, [value, md, element, handle_inserted]);
529 var toinsert = append.apply(this, [value, md, element, handle_inserted]);
527 // Since only the png and jpeg mime types call the inserted
530 // Since only the png and jpeg mime types call the inserted
528 // callback, if the mime type is something else we must call the
531 // callback, if the mime type is something else we must call the
529 // inserted callback only when the element is actually inserted
532 // inserted callback only when the element is actually inserted
530 // into the DOM. Use a timeout of 0 to do this.
533 // into the DOM. Use a timeout of 0 to do this.
531 if (['image/png', 'image/jpeg'].indexOf(type) < 0 && handle_inserted !== undefined) {
534 if (['image/png', 'image/jpeg'].indexOf(type) < 0 && handle_inserted !== undefined) {
532 setTimeout(handle_inserted, 0);
535 setTimeout(handle_inserted, 0);
533 }
536 }
534 $([IPython.events]).trigger('output_appended.OutputArea', [type, value, md, toinsert]);
537 $([IPython.events]).trigger('output_appended.OutputArea', [type, value, md, toinsert]);
535 return toinsert;
538 return toinsert;
536 }
539 }
537 }
540 }
538 return null;
541 return null;
539 };
542 };
540
543
541
544
542 var append_html = function (html, md, element) {
545 var append_html = function (html, md, element) {
543 var type = 'text/html';
546 var type = 'text/html';
544 var toinsert = this.create_output_subarea(md, "output_html rendered_html", type);
547 var toinsert = this.create_output_subarea(md, "output_html rendered_html", type);
545 IPython.keyboard_manager.register_events(toinsert);
548 IPython.keyboard_manager.register_events(toinsert);
546 toinsert.append(html);
549 toinsert.append(html);
547 element.append(toinsert);
550 element.append(toinsert);
548 return toinsert;
551 return toinsert;
549 };
552 };
550
553
551
554
552 var append_markdown = function(markdown, md, element) {
555 var append_markdown = function(markdown, md, element) {
553 var type = 'text/markdown';
556 var type = 'text/markdown';
554 var toinsert = this.create_output_subarea(md, "output_markdown", type);
557 var toinsert = this.create_output_subarea(md, "output_markdown", type);
555 var text_and_math = IPython.mathjaxutils.remove_math(markdown);
558 var text_and_math = IPython.mathjaxutils.remove_math(markdown);
556 var text = text_and_math[0];
559 var text = text_and_math[0];
557 var math = text_and_math[1];
560 var math = text_and_math[1];
558 var html = marked.parser(marked.lexer(text));
561 var html = marked.parser(marked.lexer(text));
559 html = IPython.mathjaxutils.replace_math(html, math);
562 html = IPython.mathjaxutils.replace_math(html, math);
560 toinsert.append(html);
563 toinsert.append(html);
561 element.append(toinsert);
564 element.append(toinsert);
562 return toinsert;
565 return toinsert;
563 };
566 };
564
567
565
568
566 var append_javascript = function (js, md, element) {
569 var append_javascript = function (js, md, element) {
567 // We just eval the JS code, element appears in the local scope.
570 // We just eval the JS code, element appears in the local scope.
568 var type = 'application/javascript';
571 var type = 'application/javascript';
569 var toinsert = this.create_output_subarea(md, "output_javascript", type);
572 var toinsert = this.create_output_subarea(md, "output_javascript", type);
570 IPython.keyboard_manager.register_events(toinsert);
573 IPython.keyboard_manager.register_events(toinsert);
571 element.append(toinsert);
574 element.append(toinsert);
572 // FIXME TODO : remove `container element for 3.0`
575 // FIXME TODO : remove `container element for 3.0`
573 //backward compat, js should be eval'ed in a context where `container` is defined.
576 //backward compat, js should be eval'ed in a context where `container` is defined.
574 var container = element;
577 var container = element;
575 container.show = function(){console.log('Warning "container.show()" is deprecated.')};
578 container.show = function(){console.log('Warning "container.show()" is deprecated.')};
576 // end backward compat
579 // end backward compat
577
580
578 // Fix for ipython/issues/5293, make sure `element` is the area which
581 // Fix for ipython/issues/5293, make sure `element` is the area which
579 // output can be inserted into at the time of JS execution.
582 // output can be inserted into at the time of JS execution.
580 element = toinsert;
583 element = toinsert;
581 try {
584 try {
582 eval(js);
585 eval(js);
583 } catch(err) {
586 } catch(err) {
584 console.log(err);
587 console.log(err);
585 this._append_javascript_error(err, toinsert);
588 this._append_javascript_error(err, toinsert);
586 }
589 }
587 return toinsert;
590 return toinsert;
588 };
591 };
589
592
590
593
591 var append_text = function (data, md, element) {
594 var append_text = function (data, md, element) {
592 var type = 'text/plain';
595 var type = 'text/plain';
593 var toinsert = this.create_output_subarea(md, "output_text", type);
596 var toinsert = this.create_output_subarea(md, "output_text", type);
594 // escape ANSI & HTML specials in plaintext:
597 // escape ANSI & HTML specials in plaintext:
595 data = utils.fixConsole(data);
598 data = utils.fixConsole(data);
596 data = utils.fixCarriageReturn(data);
599 data = utils.fixCarriageReturn(data);
597 data = utils.autoLinkUrls(data);
600 data = utils.autoLinkUrls(data);
598 // The only user content injected with this HTML call is
601 // The only user content injected with this HTML call is
599 // escaped by the fixConsole() method.
602 // escaped by the fixConsole() method.
600 toinsert.append($("<pre/>").html(data));
603 toinsert.append($("<pre/>").html(data));
601 element.append(toinsert);
604 element.append(toinsert);
602 return toinsert;
605 return toinsert;
603 };
606 };
604
607
605
608
606 var append_svg = function (svg_html, md, element) {
609 var append_svg = function (svg_html, md, element) {
607 var type = 'image/svg+xml';
610 var type = 'image/svg+xml';
608 var toinsert = this.create_output_subarea(md, "output_svg", type);
611 var toinsert = this.create_output_subarea(md, "output_svg", type);
609
612
610 // Get the svg element from within the HTML.
613 // Get the svg element from within the HTML.
611 var svg = $('<div />').html(svg_html).find('svg');
614 var svg = $('<div />').html(svg_html).find('svg');
612 var svg_area = $('<div />');
615 var svg_area = $('<div />');
613 var width = svg.attr('width');
616 var width = svg.attr('width');
614 var height = svg.attr('height');
617 var height = svg.attr('height');
615 svg
618 svg
616 .width('100%')
619 .width('100%')
617 .height('100%');
620 .height('100%');
618 svg_area
621 svg_area
619 .width(width)
622 .width(width)
620 .height(height);
623 .height(height);
621
624
622 // The jQuery resize handlers don't seem to work on the svg element.
625 // The jQuery resize handlers don't seem to work on the svg element.
623 // When the svg renders completely, measure its size and set the parent
626 // When the svg renders completely, measure its size and set the parent
624 // div to that size. Then set the svg to 100% the size of the parent
627 // div to that size. Then set the svg to 100% the size of the parent
625 // div and make the parent div resizable.
628 // div and make the parent div resizable.
626 this._dblclick_to_reset_size(svg_area, true, false);
629 this._dblclick_to_reset_size(svg_area, true, false);
627
630
628 svg_area.append(svg);
631 svg_area.append(svg);
629 toinsert.append(svg_area);
632 toinsert.append(svg_area);
630 element.append(toinsert);
633 element.append(toinsert);
631
634
632 return toinsert;
635 return toinsert;
633 };
636 };
634
637
635 OutputArea.prototype._dblclick_to_reset_size = function (img, immediately, resize_parent) {
638 OutputArea.prototype._dblclick_to_reset_size = function (img, immediately, resize_parent) {
636 // Add a resize handler to an element
639 // Add a resize handler to an element
637 //
640 //
638 // img: jQuery element
641 // img: jQuery element
639 // immediately: bool=False
642 // immediately: bool=False
640 // Wait for the element to load before creating the handle.
643 // Wait for the element to load before creating the handle.
641 // resize_parent: bool=True
644 // resize_parent: bool=True
642 // Should the parent of the element be resized when the element is
645 // Should the parent of the element be resized when the element is
643 // reset (by double click).
646 // reset (by double click).
644 var callback = function (){
647 var callback = function (){
645 var h0 = img.height();
648 var h0 = img.height();
646 var w0 = img.width();
649 var w0 = img.width();
647 if (!(h0 && w0)) {
650 if (!(h0 && w0)) {
648 // zero size, don't make it resizable
651 // zero size, don't make it resizable
649 return;
652 return;
650 }
653 }
651 img.resizable({
654 img.resizable({
652 aspectRatio: true,
655 aspectRatio: true,
653 autoHide: true
656 autoHide: true
654 });
657 });
655 img.dblclick(function () {
658 img.dblclick(function () {
656 // resize wrapper & image together for some reason:
659 // resize wrapper & image together for some reason:
657 img.height(h0);
660 img.height(h0);
658 img.width(w0);
661 img.width(w0);
659 if (resize_parent === undefined || resize_parent) {
662 if (resize_parent === undefined || resize_parent) {
660 img.parent().height(h0);
663 img.parent().height(h0);
661 img.parent().width(w0);
664 img.parent().width(w0);
662 }
665 }
663 });
666 });
664 };
667 };
665
668
666 if (immediately) {
669 if (immediately) {
667 callback();
670 callback();
668 } else {
671 } else {
669 img.on("load", callback);
672 img.on("load", callback);
670 }
673 }
671 };
674 };
672
675
673 var set_width_height = function (img, md, mime) {
676 var set_width_height = function (img, md, mime) {
674 // set width and height of an img element from metadata
677 // set width and height of an img element from metadata
675 var height = _get_metadata_key(md, 'height', mime);
678 var height = _get_metadata_key(md, 'height', mime);
676 if (height !== undefined) img.attr('height', height);
679 if (height !== undefined) img.attr('height', height);
677 var width = _get_metadata_key(md, 'width', mime);
680 var width = _get_metadata_key(md, 'width', mime);
678 if (width !== undefined) img.attr('width', width);
681 if (width !== undefined) img.attr('width', width);
679 };
682 };
680
683
681 var append_png = function (png, md, element, handle_inserted) {
684 var append_png = function (png, md, element, handle_inserted) {
682 var type = 'image/png';
685 var type = 'image/png';
683 var toinsert = this.create_output_subarea(md, "output_png", type);
686 var toinsert = this.create_output_subarea(md, "output_png", type);
684 var img = $("<img/>");
687 var img = $("<img/>");
685 if (handle_inserted !== undefined) {
688 if (handle_inserted !== undefined) {
686 img.on('load', function(){
689 img.on('load', function(){
687 handle_inserted(img);
690 handle_inserted(img);
688 });
691 });
689 }
692 }
690 img[0].src = 'data:image/png;base64,'+ png;
693 img[0].src = 'data:image/png;base64,'+ png;
691 set_width_height(img, md, 'image/png');
694 set_width_height(img, md, 'image/png');
692 this._dblclick_to_reset_size(img);
695 this._dblclick_to_reset_size(img);
693 toinsert.append(img);
696 toinsert.append(img);
694 element.append(toinsert);
697 element.append(toinsert);
695 return toinsert;
698 return toinsert;
696 };
699 };
697
700
698
701
699 var append_jpeg = function (jpeg, md, element, handle_inserted) {
702 var append_jpeg = function (jpeg, md, element, handle_inserted) {
700 var type = 'image/jpeg';
703 var type = 'image/jpeg';
701 var toinsert = this.create_output_subarea(md, "output_jpeg", type);
704 var toinsert = this.create_output_subarea(md, "output_jpeg", type);
702 var img = $("<img/>");
705 var img = $("<img/>");
703 if (handle_inserted !== undefined) {
706 if (handle_inserted !== undefined) {
704 img.on('load', function(){
707 img.on('load', function(){
705 handle_inserted(img);
708 handle_inserted(img);
706 });
709 });
707 }
710 }
708 img[0].src = 'data:image/jpeg;base64,'+ jpeg;
711 img[0].src = 'data:image/jpeg;base64,'+ jpeg;
709 set_width_height(img, md, 'image/jpeg');
712 set_width_height(img, md, 'image/jpeg');
710 this._dblclick_to_reset_size(img);
713 this._dblclick_to_reset_size(img);
711 toinsert.append(img);
714 toinsert.append(img);
712 element.append(toinsert);
715 element.append(toinsert);
713 return toinsert;
716 return toinsert;
714 };
717 };
715
718
716
719
717 var append_pdf = function (pdf, md, element) {
720 var append_pdf = function (pdf, md, element) {
718 var type = 'application/pdf';
721 var type = 'application/pdf';
719 var toinsert = this.create_output_subarea(md, "output_pdf", type);
722 var toinsert = this.create_output_subarea(md, "output_pdf", type);
720 var a = $('<a/>').attr('href', 'data:application/pdf;base64,'+pdf);
723 var a = $('<a/>').attr('href', 'data:application/pdf;base64,'+pdf);
721 a.attr('target', '_blank');
724 a.attr('target', '_blank');
722 a.text('View PDF')
725 a.text('View PDF')
723 toinsert.append(a);
726 toinsert.append(a);
724 element.append(toinsert);
727 element.append(toinsert);
725 return toinsert;
728 return toinsert;
726 }
729 }
727
730
728 var append_latex = function (latex, md, element) {
731 var append_latex = function (latex, md, element) {
729 // This method cannot do the typesetting because the latex first has to
732 // This method cannot do the typesetting because the latex first has to
730 // be on the page.
733 // be on the page.
731 var type = 'text/latex';
734 var type = 'text/latex';
732 var toinsert = this.create_output_subarea(md, "output_latex", type);
735 var toinsert = this.create_output_subarea(md, "output_latex", type);
733 toinsert.append(latex);
736 toinsert.append(latex);
734 element.append(toinsert);
737 element.append(toinsert);
735 return toinsert;
738 return toinsert;
736 };
739 };
737
740
738
741
739 OutputArea.prototype.append_raw_input = function (msg) {
742 OutputArea.prototype.append_raw_input = function (msg) {
740 var that = this;
743 var that = this;
741 this.expand();
744 this.expand();
742 var content = msg.content;
745 var content = msg.content;
743 var area = this.create_output_area();
746 var area = this.create_output_area();
744
747
745 // disable any other raw_inputs, if they are left around
748 // disable any other raw_inputs, if they are left around
746 $("div.output_subarea.raw_input_container").remove();
749 $("div.output_subarea.raw_input_container").remove();
747
750
748 area.append(
751 area.append(
749 $("<div/>")
752 $("<div/>")
750 .addClass("box-flex1 output_subarea raw_input_container")
753 .addClass("box-flex1 output_subarea raw_input_container")
751 .append(
754 .append(
752 $("<span/>")
755 $("<span/>")
753 .addClass("raw_input_prompt")
756 .addClass("raw_input_prompt")
754 .text(content.prompt)
757 .text(content.prompt)
755 )
758 )
756 .append(
759 .append(
757 $("<input/>")
760 $("<input/>")
758 .addClass("raw_input")
761 .addClass("raw_input")
759 .attr('type', 'text')
762 .attr('type', 'text')
760 .attr("size", 47)
763 .attr("size", 47)
761 .keydown(function (event, ui) {
764 .keydown(function (event, ui) {
762 // make sure we submit on enter,
765 // make sure we submit on enter,
763 // and don't re-execute the *cell* on shift-enter
766 // and don't re-execute the *cell* on shift-enter
764 if (event.which === IPython.keyboard.keycodes.enter) {
767 if (event.which === IPython.keyboard.keycodes.enter) {
765 that._submit_raw_input();
768 that._submit_raw_input();
766 return false;
769 return false;
767 }
770 }
768 })
771 })
769 )
772 )
770 );
773 );
771
774
772 this.element.append(area);
775 this.element.append(area);
773 var raw_input = area.find('input.raw_input');
776 var raw_input = area.find('input.raw_input');
774 // Register events that enable/disable the keyboard manager while raw
777 // Register events that enable/disable the keyboard manager while raw
775 // input is focused.
778 // input is focused.
776 IPython.keyboard_manager.register_events(raw_input);
779 IPython.keyboard_manager.register_events(raw_input);
777 // Note, the following line used to read raw_input.focus().focus().
780 // Note, the following line used to read raw_input.focus().focus().
778 // This seemed to be needed otherwise only the cell would be focused.
781 // This seemed to be needed otherwise only the cell would be focused.
779 // But with the modal UI, this seems to work fine with one call to focus().
782 // But with the modal UI, this seems to work fine with one call to focus().
780 raw_input.focus();
783 raw_input.focus();
781 }
784 }
782
785
783 OutputArea.prototype._submit_raw_input = function (evt) {
786 OutputArea.prototype._submit_raw_input = function (evt) {
784 var container = this.element.find("div.raw_input_container");
787 var container = this.element.find("div.raw_input_container");
785 var theprompt = container.find("span.raw_input_prompt");
788 var theprompt = container.find("span.raw_input_prompt");
786 var theinput = container.find("input.raw_input");
789 var theinput = container.find("input.raw_input");
787 var value = theinput.val();
790 var value = theinput.val();
788 var content = {
791 var content = {
789 output_type : 'stream',
792 output_type : 'stream',
790 name : 'stdout',
793 name : 'stdout',
791 text : theprompt.text() + value + '\n'
794 text : theprompt.text() + value + '\n'
792 }
795 }
793 // remove form container
796 // remove form container
794 container.parent().remove();
797 container.parent().remove();
795 // replace with plaintext version in stdout
798 // replace with plaintext version in stdout
796 this.append_output(content, false);
799 this.append_output(content, false);
797 $([IPython.events]).trigger('send_input_reply.Kernel', value);
800 $([IPython.events]).trigger('send_input_reply.Kernel', value);
798 }
801 }
799
802
800
803
801 OutputArea.prototype.handle_clear_output = function (msg) {
804 OutputArea.prototype.handle_clear_output = function (msg) {
802 // msg spec v4 had stdout, stderr, display keys
805 // msg spec v4 had stdout, stderr, display keys
803 // v4.1 replaced these with just wait
806 // v4.1 replaced these with just wait
804 // The default behavior is the same (stdout=stderr=display=True, wait=False),
807 // The default behavior is the same (stdout=stderr=display=True, wait=False),
805 // so v4 messages will still be properly handled,
808 // so v4 messages will still be properly handled,
806 // except for the rarely used clearing less than all output.
809 // except for the rarely used clearing less than all output.
807 this.clear_output(msg.content.wait || false);
810 this.clear_output(msg.content.wait || false);
808 };
811 };
809
812
810
813
811 OutputArea.prototype.clear_output = function(wait) {
814 OutputArea.prototype.clear_output = function(wait) {
812 if (wait) {
815 if (wait) {
813
816
814 // If a clear is queued, clear before adding another to the queue.
817 // If a clear is queued, clear before adding another to the queue.
815 if (this.clear_queued) {
818 if (this.clear_queued) {
816 this.clear_output(false);
819 this.clear_output(false);
817 };
820 };
818
821
819 this.clear_queued = true;
822 this.clear_queued = true;
820 } else {
823 } else {
821
824
822 // Fix the output div's height if the clear_output is waiting for
825 // Fix the output div's height if the clear_output is waiting for
823 // new output (it is being used in an animation).
826 // new output (it is being used in an animation).
824 if (this.clear_queued) {
827 if (this.clear_queued) {
825 var height = this.element.height();
828 var height = this.element.height();
826 this.element.height(height);
829 this.element.height(height);
827 this.clear_queued = false;
830 this.clear_queued = false;
828 }
831 }
829
832
830 // Clear all
833 // Clear all
831 // Remove load event handlers from img tags because we don't want
834 // Remove load event handlers from img tags because we don't want
832 // them to fire if the image is never added to the page.
835 // them to fire if the image is never added to the page.
833 this.element.find('img').off('load');
836 this.element.find('img').off('load');
834 this.element.html("");
837 this.element.html("");
835 this.outputs = [];
838 this.outputs = [];
836 this.trusted = true;
839 this.trusted = true;
837 this.unscroll_area();
840 this.unscroll_area();
838 return;
841 return;
839 };
842 };
840 };
843 };
841
844
842
845
843 // JSON serialization
846 // JSON serialization
844
847
845 OutputArea.prototype.fromJSON = function (outputs) {
848 OutputArea.prototype.fromJSON = function (outputs) {
846 var len = outputs.length;
849 var len = outputs.length;
847 var data;
850 var data;
848
851
849 for (var i=0; i<len; i++) {
852 for (var i=0; i<len; i++) {
850 data = outputs[i];
853 data = outputs[i];
851 var msg_type = data.output_type;
854 var msg_type = data.output_type;
852 if (msg_type === "display_data" || msg_type === "pyout") {
855 if (msg_type === "display_data" || msg_type === "pyout") {
853 // convert short keys to mime keys
856 // convert short keys to mime keys
854 // TODO: remove mapping of short keys when we update to nbformat 4
857 // TODO: remove mapping of short keys when we update to nbformat 4
855 data = this.rename_keys(data, OutputArea.mime_map_r);
858 data = this.rename_keys(data, OutputArea.mime_map_r);
856 data.metadata = this.rename_keys(data.metadata, OutputArea.mime_map_r);
859 data.metadata = this.rename_keys(data.metadata, OutputArea.mime_map_r);
857 }
860 }
858
861
859 this.append_output(data);
862 this.append_output(data);
860 }
863 }
861 };
864 };
862
865
863
866
864 OutputArea.prototype.toJSON = function () {
867 OutputArea.prototype.toJSON = function () {
865 var outputs = [];
868 var outputs = [];
866 var len = this.outputs.length;
869 var len = this.outputs.length;
867 var data;
870 var data;
868 for (var i=0; i<len; i++) {
871 for (var i=0; i<len; i++) {
869 data = this.outputs[i];
872 data = this.outputs[i];
870 var msg_type = data.output_type;
873 var msg_type = data.output_type;
871 if (msg_type === "display_data" || msg_type === "pyout") {
874 if (msg_type === "display_data" || msg_type === "pyout") {
872 // convert mime keys to short keys
875 // convert mime keys to short keys
873 data = this.rename_keys(data, OutputArea.mime_map);
876 data = this.rename_keys(data, OutputArea.mime_map);
874 data.metadata = this.rename_keys(data.metadata, OutputArea.mime_map);
877 data.metadata = this.rename_keys(data.metadata, OutputArea.mime_map);
875 }
878 }
876 outputs[i] = data;
879 outputs[i] = data;
877 }
880 }
878 return outputs;
881 return outputs;
879 };
882 };
880
883
881 /**
884 /**
882 * Class properties
885 * Class properties
883 **/
886 **/
884
887
885 /**
888 /**
886 * Threshold to trigger autoscroll when the OutputArea is resized,
889 * Threshold to trigger autoscroll when the OutputArea is resized,
887 * typically when new outputs are added.
890 * typically when new outputs are added.
888 *
891 *
889 * Behavior is undefined if autoscroll is lower than minimum_scroll_threshold,
892 * Behavior is undefined if autoscroll is lower than minimum_scroll_threshold,
890 * unless it is < 0, in which case autoscroll will never be triggered
893 * unless it is < 0, in which case autoscroll will never be triggered
891 *
894 *
892 * @property auto_scroll_threshold
895 * @property auto_scroll_threshold
893 * @type Number
896 * @type Number
894 * @default 100
897 * @default 100
895 *
898 *
896 **/
899 **/
897 OutputArea.auto_scroll_threshold = 100;
900 OutputArea.auto_scroll_threshold = 100;
898
901
899 /**
902 /**
900 * Lower limit (in lines) for OutputArea to be made scrollable. OutputAreas
903 * Lower limit (in lines) for OutputArea to be made scrollable. OutputAreas
901 * shorter than this are never scrolled.
904 * shorter than this are never scrolled.
902 *
905 *
903 * @property minimum_scroll_threshold
906 * @property minimum_scroll_threshold
904 * @type Number
907 * @type Number
905 * @default 20
908 * @default 20
906 *
909 *
907 **/
910 **/
908 OutputArea.minimum_scroll_threshold = 20;
911 OutputArea.minimum_scroll_threshold = 20;
909
912
910
913
911
914
912 OutputArea.mime_map = {
915 OutputArea.mime_map = {
913 "text/plain" : "text",
916 "text/plain" : "text",
914 "text/html" : "html",
917 "text/html" : "html",
915 "image/svg+xml" : "svg",
918 "image/svg+xml" : "svg",
916 "image/png" : "png",
919 "image/png" : "png",
917 "image/jpeg" : "jpeg",
920 "image/jpeg" : "jpeg",
918 "text/latex" : "latex",
921 "text/latex" : "latex",
919 "application/json" : "json",
922 "application/json" : "json",
920 "application/javascript" : "javascript",
923 "application/javascript" : "javascript",
921 };
924 };
922
925
923 OutputArea.mime_map_r = {
926 OutputArea.mime_map_r = {
924 "text" : "text/plain",
927 "text" : "text/plain",
925 "html" : "text/html",
928 "html" : "text/html",
926 "svg" : "image/svg+xml",
929 "svg" : "image/svg+xml",
927 "png" : "image/png",
930 "png" : "image/png",
928 "jpeg" : "image/jpeg",
931 "jpeg" : "image/jpeg",
929 "latex" : "text/latex",
932 "latex" : "text/latex",
930 "json" : "application/json",
933 "json" : "application/json",
931 "javascript" : "application/javascript",
934 "javascript" : "application/javascript",
932 };
935 };
933
936
934 OutputArea.display_order = [
937 OutputArea.display_order = [
935 'application/javascript',
938 'application/javascript',
936 'text/html',
939 'text/html',
937 'text/markdown',
940 'text/markdown',
938 'text/latex',
941 'text/latex',
939 'image/svg+xml',
942 'image/svg+xml',
940 'image/png',
943 'image/png',
941 'image/jpeg',
944 'image/jpeg',
942 'application/pdf',
945 'application/pdf',
943 'text/plain'
946 'text/plain'
944 ];
947 ];
945
948
946 OutputArea.append_map = {
949 OutputArea.append_map = {
947 "text/plain" : append_text,
950 "text/plain" : append_text,
948 "text/html" : append_html,
951 "text/html" : append_html,
949 "text/markdown": append_markdown,
952 "text/markdown": append_markdown,
950 "image/svg+xml" : append_svg,
953 "image/svg+xml" : append_svg,
951 "image/png" : append_png,
954 "image/png" : append_png,
952 "image/jpeg" : append_jpeg,
955 "image/jpeg" : append_jpeg,
953 "text/latex" : append_latex,
956 "text/latex" : append_latex,
954 "application/javascript" : append_javascript,
957 "application/javascript" : append_javascript,
955 "application/pdf" : append_pdf
958 "application/pdf" : append_pdf
956 };
959 };
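// Illustrative sketch (not part of the file): how display_order and append_map
// are typically combined -- walk the preference list and use the first MIME type
// that is both present in a message's data dict and has a registered appender.
// The sample data bundle below is hypothetical.
var pick_richest = function (data) {
    for (var i = 0; i < OutputArea.display_order.length; i++) {
        var type = OutputArea.display_order[i];
        if (data[type] !== undefined && OutputArea.append_map[type] !== undefined) {
            return type;
        }
    }
    return null;
};
// pick_richest({'text/plain': '<Figure>', 'image/png': 'iVBORw...'})  // -> 'image/png'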
957
960
958 IPython.OutputArea = OutputArea;
961 IPython.OutputArea = OutputArea;
959
962
960 return IPython;
963 return IPython;
961
964
962 }(IPython));
965 }(IPython));
@@ -1,624 +1,624 b''
1 // Copyright (c) IPython Development Team.
1 // Copyright (c) IPython Development Team.
2 // Distributed under the terms of the Modified BSD License.
2 // Distributed under the terms of the Modified BSD License.
3
3
4 //============================================================================
4 //============================================================================
5 // Kernel
5 // Kernel
6 //============================================================================
6 //============================================================================
7
7
8 /**
8 /**
9 * @module IPython
9 * @module IPython
10 * @namespace IPython
10 * @namespace IPython
11 * @submodule Kernel
11 * @submodule Kernel
12 */
12 */
13
13
14 var IPython = (function (IPython) {
14 var IPython = (function (IPython) {
15 "use strict";
15 "use strict";
16
16
17 var utils = IPython.utils;
17 var utils = IPython.utils;
18
18
19 // Initialization and connection.
19 // Initialization and connection.
20 /**
20 /**
21 * A Kernel Class to communicate with the Python kernel
21 * A Kernel Class to communicate with the Python kernel
22 * @Class Kernel
22 * @Class Kernel
23 */
23 */
24 var Kernel = function (kernel_service_url) {
24 var Kernel = function (kernel_service_url) {
25 this.kernel_id = null;
25 this.kernel_id = null;
26 this.shell_channel = null;
26 this.shell_channel = null;
27 this.iopub_channel = null;
27 this.iopub_channel = null;
28 this.stdin_channel = null;
28 this.stdin_channel = null;
29 this.kernel_service_url = kernel_service_url;
29 this.kernel_service_url = kernel_service_url;
30 this.running = false;
30 this.running = false;
31 this.username = "username";
31 this.username = "username";
32 this.session_id = utils.uuid();
32 this.session_id = utils.uuid();
33 this._msg_callbacks = {};
33 this._msg_callbacks = {};
34 this.post = $.post;
34 this.post = $.post;
35
35
36 if (typeof(WebSocket) !== 'undefined') {
36 if (typeof(WebSocket) !== 'undefined') {
37 this.WebSocket = WebSocket;
37 this.WebSocket = WebSocket;
38 } else if (typeof(MozWebSocket) !== 'undefined') {
38 } else if (typeof(MozWebSocket) !== 'undefined') {
39 this.WebSocket = MozWebSocket;
39 this.WebSocket = MozWebSocket;
40 } else {
40 } else {
41 alert('Your browser does not have WebSocket support, please try Chrome, Safari or Firefox ≥ 6. Firefox 4 and 5 are also supported but you have to enable WebSockets in about:config.');
41 alert('Your browser does not have WebSocket support, please try Chrome, Safari or Firefox ≥ 6. Firefox 4 and 5 are also supported but you have to enable WebSockets in about:config.');
42 }
42 }
43
43
44 this.bind_events();
44 this.bind_events();
45 this.init_iopub_handlers();
45 this.init_iopub_handlers();
46 this.comm_manager = new IPython.CommManager(this);
46 this.comm_manager = new IPython.CommManager(this);
47 this.widget_manager = new IPython.WidgetManager(this.comm_manager);
47 this.widget_manager = new IPython.WidgetManager(this.comm_manager);
48
48
49 this.last_msg_id = null;
49 this.last_msg_id = null;
50 this.last_msg_callbacks = {};
50 this.last_msg_callbacks = {};
51 };
51 };
52
52
53
53
54 Kernel.prototype._get_msg = function (msg_type, content, metadata) {
54 Kernel.prototype._get_msg = function (msg_type, content, metadata) {
55 var msg = {
55 var msg = {
56 header : {
56 header : {
57 msg_id : utils.uuid(),
57 msg_id : utils.uuid(),
58 username : this.username,
58 username : this.username,
59 session : this.session_id,
59 session : this.session_id,
60 msg_type : msg_type
60 msg_type : msg_type
61 },
61 },
62 metadata : metadata || {},
62 metadata : metadata || {},
63 content : content,
63 content : content,
64 parent_header : {}
64 parent_header : {}
65 };
65 };
66 return msg;
66 return msg;
67 };
67 };
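// Illustrative example (hypothetical values): the envelope _get_msg builds for an
// execute_request; parent_header is always left empty on the client side.
// var msg = kernel._get_msg('execute_request', {code: 'x=1'});
// msg == {
//     header : {msg_id: '<uuid>', username: 'username',
//               session: '<uuid>', msg_type: 'execute_request'},
//     metadata : {},
//     content : {code: 'x=1'},
//     parent_header : {}
// }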
68
68
69 Kernel.prototype.bind_events = function () {
69 Kernel.prototype.bind_events = function () {
70 var that = this;
70 var that = this;
71 $([IPython.events]).on('send_input_reply.Kernel', function(evt, data) {
71 $([IPython.events]).on('send_input_reply.Kernel', function(evt, data) {
72 that.send_input_reply(data);
72 that.send_input_reply(data);
73 });
73 });
74 };
74 };
75
75
76 // Initialize the iopub handlers
76 // Initialize the iopub handlers
77
77
78 Kernel.prototype.init_iopub_handlers = function () {
78 Kernel.prototype.init_iopub_handlers = function () {
79 var output_types = ['stream', 'display_data', 'execute_result', 'pyerr'];
79 var output_msg_types = ['stream', 'display_data', 'execute_result', 'error'];
80 this._iopub_handlers = {};
80 this._iopub_handlers = {};
81 this.register_iopub_handler('status', $.proxy(this._handle_status_message, this));
81 this.register_iopub_handler('status', $.proxy(this._handle_status_message, this));
82 this.register_iopub_handler('clear_output', $.proxy(this._handle_clear_output, this));
82 this.register_iopub_handler('clear_output', $.proxy(this._handle_clear_output, this));
83
83
84 for (var i=0; i < output_types.length; i++) {
84 for (var i=0; i < output_msg_types.length; i++) {
85 this.register_iopub_handler(output_types[i], $.proxy(this._handle_output_message, this));
85 this.register_iopub_handler(output_msg_types[i], $.proxy(this._handle_output_message, this));
86 }
86 }
87 };
87 };
88
88
89 /**
89 /**
90 * Start the Python kernel
90 * Start the Python kernel
91 * @method start
91 * @method start
92 */
92 */
93 Kernel.prototype.start = function (params) {
93 Kernel.prototype.start = function (params) {
94 params = params || {};
94 params = params || {};
95 if (!this.running) {
95 if (!this.running) {
96 var qs = $.param(params);
96 var qs = $.param(params);
97 this.post(utils.url_join_encode(this.kernel_service_url) + '?' + qs,
97 this.post(utils.url_join_encode(this.kernel_service_url) + '?' + qs,
98 $.proxy(this._kernel_started, this),
98 $.proxy(this._kernel_started, this),
99 'json'
99 'json'
100 );
100 );
101 }
101 }
102 };
102 };
103
103
104 /**
104 /**
105 * Restart the python kernel.
105 * Restart the python kernel.
106 *
106 *
107 * Emit a 'status_restarting.Kernel' event with
107 * Emit a 'status_restarting.Kernel' event with
108 * the current object as parameter
108 * the current object as parameter
109 *
109 *
110 * @method restart
110 * @method restart
111 */
111 */
112 Kernel.prototype.restart = function () {
112 Kernel.prototype.restart = function () {
113 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
113 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
114 if (this.running) {
114 if (this.running) {
115 this.stop_channels();
115 this.stop_channels();
116 this.post(utils.url_join_encode(this.kernel_url, "restart"),
116 this.post(utils.url_join_encode(this.kernel_url, "restart"),
117 $.proxy(this._kernel_started, this),
117 $.proxy(this._kernel_started, this),
118 'json'
118 'json'
119 );
119 );
120 }
120 }
121 };
121 };
122
122
123
123
124 Kernel.prototype._kernel_started = function (json) {
124 Kernel.prototype._kernel_started = function (json) {
125 console.log("Kernel started: ", json.id);
125 console.log("Kernel started: ", json.id);
126 this.running = true;
126 this.running = true;
127 this.kernel_id = json.id;
127 this.kernel_id = json.id;
128 // trailing 's' in https will become wss for secure web sockets
128 // trailing 's' in https will become wss for secure web sockets
129 this.ws_host = location.protocol.replace('http', 'ws') + "//" + location.host;
129 this.ws_host = location.protocol.replace('http', 'ws') + "//" + location.host;
130 this.kernel_url = utils.url_path_join(this.kernel_service_url, this.kernel_id);
130 this.kernel_url = utils.url_path_join(this.kernel_service_url, this.kernel_id);
131 this.start_channels();
131 this.start_channels();
132 };
132 };
133
133
134
134
135 Kernel.prototype._websocket_closed = function(ws_url, early) {
135 Kernel.prototype._websocket_closed = function(ws_url, early) {
136 this.stop_channels();
136 this.stop_channels();
137 $([IPython.events]).trigger('websocket_closed.Kernel',
137 $([IPython.events]).trigger('websocket_closed.Kernel',
138 {ws_url: ws_url, kernel: this, early: early}
138 {ws_url: ws_url, kernel: this, early: early}
139 );
139 );
140 };
140 };
141
141
142 /**
142 /**
143 * Start the `shell`, `iopub` and `stdin` channels.
143 * Start the `shell`, `iopub` and `stdin` channels.
144 * Will stop and restart them if they already exist.
144 * Will stop and restart them if they already exist.
145 *
145 *
146 * @method start_channels
146 * @method start_channels
147 */
147 */
148 Kernel.prototype.start_channels = function () {
148 Kernel.prototype.start_channels = function () {
149 var that = this;
149 var that = this;
150 this.stop_channels();
150 this.stop_channels();
151 var ws_host_url = this.ws_host + this.kernel_url;
151 var ws_host_url = this.ws_host + this.kernel_url;
152 console.log("Starting WebSockets:", ws_host_url);
152 console.log("Starting WebSockets:", ws_host_url);
153 this.shell_channel = new this.WebSocket(
153 this.shell_channel = new this.WebSocket(
154 this.ws_host + utils.url_join_encode(this.kernel_url, "shell")
154 this.ws_host + utils.url_join_encode(this.kernel_url, "shell")
155 );
155 );
156 this.stdin_channel = new this.WebSocket(
156 this.stdin_channel = new this.WebSocket(
157 this.ws_host + utils.url_join_encode(this.kernel_url, "stdin")
157 this.ws_host + utils.url_join_encode(this.kernel_url, "stdin")
158 );
158 );
159 this.iopub_channel = new this.WebSocket(
159 this.iopub_channel = new this.WebSocket(
160 this.ws_host + utils.url_join_encode(this.kernel_url, "iopub")
160 this.ws_host + utils.url_join_encode(this.kernel_url, "iopub")
161 );
161 );
162
162
163 var already_called_onclose = false; // only alert once
163 var already_called_onclose = false; // only alert once
164 var ws_closed_early = function(evt){
164 var ws_closed_early = function(evt){
165 if (already_called_onclose){
165 if (already_called_onclose){
166 return;
166 return;
167 }
167 }
168 already_called_onclose = true;
168 already_called_onclose = true;
169 if ( ! evt.wasClean ){
169 if ( ! evt.wasClean ){
170 that._websocket_closed(ws_host_url, true);
170 that._websocket_closed(ws_host_url, true);
171 }
171 }
172 };
172 };
173 var ws_closed_late = function(evt){
173 var ws_closed_late = function(evt){
174 if (already_called_onclose){
174 if (already_called_onclose){
175 return;
175 return;
176 }
176 }
177 already_called_onclose = true;
177 already_called_onclose = true;
178 if ( ! evt.wasClean ){
178 if ( ! evt.wasClean ){
179 that._websocket_closed(ws_host_url, false);
179 that._websocket_closed(ws_host_url, false);
180 }
180 }
181 };
181 };
182 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
182 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
183 for (var i=0; i < channels.length; i++) {
183 for (var i=0; i < channels.length; i++) {
184 channels[i].onopen = $.proxy(this._ws_opened, this);
184 channels[i].onopen = $.proxy(this._ws_opened, this);
185 channels[i].onclose = ws_closed_early;
185 channels[i].onclose = ws_closed_early;
186 }
186 }
187 // switch from early-close to late-close message after 1s
187 // switch from early-close to late-close message after 1s
188 setTimeout(function() {
188 setTimeout(function() {
189 for (var i=0; i < channels.length; i++) {
189 for (var i=0; i < channels.length; i++) {
190 if (channels[i] !== null) {
190 if (channels[i] !== null) {
191 channels[i].onclose = ws_closed_late;
191 channels[i].onclose = ws_closed_late;
192 }
192 }
193 }
193 }
194 }, 1000);
194 }, 1000);
195 this.shell_channel.onmessage = $.proxy(this._handle_shell_reply, this);
195 this.shell_channel.onmessage = $.proxy(this._handle_shell_reply, this);
196 this.iopub_channel.onmessage = $.proxy(this._handle_iopub_message, this);
196 this.iopub_channel.onmessage = $.proxy(this._handle_iopub_message, this);
197 this.stdin_channel.onmessage = $.proxy(this._handle_input_request, this);
197 this.stdin_channel.onmessage = $.proxy(this._handle_input_request, this);
198 };
198 };
199
199
200 /**
200 /**
201 * Handle a websocket entering the open state
201 * Handle a websocket entering the open state
202 * sends session and cookie authentication info as first message.
202 * sends session and cookie authentication info as first message.
203 * Once all sockets are open, signal the Kernel.status_started event.
203 * Once all sockets are open, signal the Kernel.status_started event.
204 * @method _ws_opened
204 * @method _ws_opened
205 */
205 */
206 Kernel.prototype._ws_opened = function (evt) {
206 Kernel.prototype._ws_opened = function (evt) {
207 // send the session id so the Session object Python-side
207 // send the session id so the Session object Python-side
208 // has the same identity
208 // has the same identity
209 evt.target.send(this.session_id + ':' + document.cookie);
209 evt.target.send(this.session_id + ':' + document.cookie);
210
210
211 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
211 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
212 for (var i=0; i < channels.length; i++) {
212 for (var i=0; i < channels.length; i++) {
213 // if any channel is not ready, don't trigger event.
213 // if any channel is not ready, don't trigger event.
214 if ( !channels[i].readyState ) return;
214 if ( !channels[i].readyState ) return;
215 }
215 }
216 // all channels ready, trigger started event.
216 // all channels ready, trigger started event.
217 $([IPython.events]).trigger('status_started.Kernel', {kernel: this});
217 $([IPython.events]).trigger('status_started.Kernel', {kernel: this});
218 };
218 };
219
219
220 /**
220 /**
221 * Stop the websocket channels.
221 * Stop the websocket channels.
222 * @method stop_channels
222 * @method stop_channels
223 */
223 */
224 Kernel.prototype.stop_channels = function () {
224 Kernel.prototype.stop_channels = function () {
225 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
225 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
226 for (var i=0; i < channels.length; i++) {
226 for (var i=0; i < channels.length; i++) {
227 if ( channels[i] !== null ) {
227 if ( channels[i] !== null ) {
228 channels[i].onclose = null;
228 channels[i].onclose = null;
229 channels[i].close();
229 channels[i].close();
230 }
230 }
231 }
231 }
232 this.shell_channel = this.iopub_channel = this.stdin_channel = null;
232 this.shell_channel = this.iopub_channel = this.stdin_channel = null;
233 };
233 };
234
234
235 // Main public methods.
235 // Main public methods.
236
236
237 // send a message on the Kernel's shell channel
237 // send a message on the Kernel's shell channel
238 Kernel.prototype.send_shell_message = function (msg_type, content, callbacks, metadata) {
238 Kernel.prototype.send_shell_message = function (msg_type, content, callbacks, metadata) {
239 var msg = this._get_msg(msg_type, content, metadata);
239 var msg = this._get_msg(msg_type, content, metadata);
240 this.shell_channel.send(JSON.stringify(msg));
240 this.shell_channel.send(JSON.stringify(msg));
241 this.set_callbacks_for_msg(msg.header.msg_id, callbacks);
241 this.set_callbacks_for_msg(msg.header.msg_id, callbacks);
242 return msg.header.msg_id;
242 return msg.header.msg_id;
243 };
243 };
244
244
245 /**
245 /**
246 * Get kernel info
246 * Get kernel info
247 *
247 *
248 * @param callback {function}
248 * @param callback {function}
249 * @method kernel_info
249 * @method kernel_info
250 *
250 *
251 * When calling this method, pass a callback function that expects one argument.
251 * When calling this method, pass a callback function that expects one argument.
252 * The callback will be passed the complete `kernel_info_reply` message documented
252 * The callback will be passed the complete `kernel_info_reply` message documented
253 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#kernel-info)
253 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#kernel-info)
254 */
254 */
255 Kernel.prototype.kernel_info = function (callback) {
255 Kernel.prototype.kernel_info = function (callback) {
256 var callbacks;
256 var callbacks;
257 if (callback) {
257 if (callback) {
258 callbacks = { shell : { reply : callback } };
258 callbacks = { shell : { reply : callback } };
259 }
259 }
260 return this.send_shell_message("kernel_info_request", {}, callbacks);
260 return this.send_shell_message("kernel_info_request", {}, callbacks);
261 };
261 };
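// Illustrative usage sketch (assumes a started Kernel instance named `kernel`):
// the callback receives the complete kernel_info_reply message.
kernel.kernel_info(function (reply) {
    console.log('protocol version:', reply.content.protocol_version);
});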
262
262
263 /**
263 /**
264 * Get info on an object
264 * Get info on an object
265 *
265 *
266 * @param objname {string}
266 * @param objname {string}
267 * @param callback {function}
267 * @param callback {function}
268 * @method object_info
268 * @method object_info
269 *
269 *
270 * When calling this method, pass a callback function that expects one argument.
270 * When calling this method, pass a callback function that expects one argument.
271 * The callback will be passed the complete `object_info_reply` message documented
271 * The callback will be passed the complete `object_info_reply` message documented
272 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#object-information)
272 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#object-information)
273 */
273 */
274 Kernel.prototype.object_info = function (objname, callback) {
274 Kernel.prototype.object_info = function (objname, callback) {
275 var callbacks;
275 var callbacks;
276 if (callback) {
276 if (callback) {
277 callbacks = { shell : { reply : callback } };
277 callbacks = { shell : { reply : callback } };
278 }
278 }
279
279
280 if (typeof objname !== 'undefined' && objname !== null) {
280 if (typeof objname !== 'undefined' && objname !== null) {
281 var content = {
281 var content = {
282 oname : objname.toString(),
282 oname : objname.toString(),
283 detail_level : 0,
283 detail_level : 0,
284 };
284 };
285 return this.send_shell_message("object_info_request", content, callbacks);
285 return this.send_shell_message("object_info_request", content, callbacks);
286 }
286 }
287 return;
287 return;
288 };
288 };
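// Illustrative usage sketch (hypothetical object name, assumes a started `kernel`):
kernel.object_info('range', function (reply) {
    var c = reply.content;
    console.log(c.found, c.docstring);
});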
289
289
290 /**
290 /**
291 * Execute given code into kernel, and pass result to callback.
291 * Execute given code into kernel, and pass result to callback.
292 *
292 *
293 * @async
293 * @async
294 * @method execute
294 * @method execute
295 * @param {string} code
295 * @param {string} code
296 * @param [callbacks] {Object} With the following keys (all optional)
296 * @param [callbacks] {Object} With the following keys (all optional)
297 * @param callbacks.shell.reply {function}
297 * @param callbacks.shell.reply {function}
298 * @param callbacks.shell.payload.[payload_name] {function}
298 * @param callbacks.shell.payload.[payload_name] {function}
299 * @param callbacks.iopub.output {function}
299 * @param callbacks.iopub.output {function}
300 * @param callbacks.iopub.clear_output {function}
300 * @param callbacks.iopub.clear_output {function}
301 * @param callbacks.input {function}
301 * @param callbacks.input {function}
302 * @param {object} [options]
302 * @param {object} [options]
303 * @param [options.silent=true] {Boolean}
303 * @param [options.silent=true] {Boolean}
304 * @param [options.user_expressions=empty_dict] {Dict}
304 * @param [options.user_expressions=empty_dict] {Dict}
305 * @param [options.user_variables=empty_list] {List of Strings}
305 * @param [options.user_variables=empty_list] {List of Strings}
306 * @param [options.allow_stdin=false] {Boolean} true|false
306 * @param [options.allow_stdin=false] {Boolean} true|false
307 *
307 *
308 * @example
308 * @example
309 *
309 *
310 * The options object should contain the options for the execute call. Its default
310 * The options object should contain the options for the execute call. Its default
311 * values are:
311 * values are:
312 *
312 *
313 * options = {
313 * options = {
314 * silent : true,
314 * silent : true,
315 * user_variables : [],
315 * user_variables : [],
316 * user_expressions : {},
316 * user_expressions : {},
317 * allow_stdin : false
317 * allow_stdin : false
318 * }
318 * }
319 *
319 *
320 * When calling this method pass a callbacks structure of the form:
320 * When calling this method pass a callbacks structure of the form:
321 *
321 *
322 * callbacks = {
322 * callbacks = {
323 * shell : {
323 * shell : {
324 * reply : execute_reply_callback,
324 * reply : execute_reply_callback,
325 * payload : {
325 * payload : {
326 * set_next_input : set_next_input_callback,
326 * set_next_input : set_next_input_callback,
327 * }
327 * }
328 * },
328 * },
329 * iopub : {
329 * iopub : {
330 * output : output_callback,
330 * output : output_callback,
331 * clear_output : clear_output_callback,
331 * clear_output : clear_output_callback,
332 * },
332 * },
333 * input : raw_input_callback
333 * input : raw_input_callback
334 * }
334 * }
335 *
335 *
336 * Each callback will be passed the entire message as a single argument.
336 * Each callback will be passed the entire message as a single argument.
337 * Payload handlers will be passed the corresponding payload and the execute_reply message.
337 * Payload handlers will be passed the corresponding payload and the execute_reply message.
338 */
338 */
339 Kernel.prototype.execute = function (code, callbacks, options) {
339 Kernel.prototype.execute = function (code, callbacks, options) {
340
340
341 var content = {
341 var content = {
342 code : code,
342 code : code,
343 silent : true,
343 silent : true,
344 store_history : false,
344 store_history : false,
345 user_variables : [],
345 user_variables : [],
346 user_expressions : {},
346 user_expressions : {},
347 allow_stdin : false
347 allow_stdin : false
348 };
348 };
349 callbacks = callbacks || {};
349 callbacks = callbacks || {};
350 if (callbacks.input !== undefined) {
350 if (callbacks.input !== undefined) {
351 content.allow_stdin = true;
351 content.allow_stdin = true;
352 }
352 }
353 $.extend(true, content, options);
353 $.extend(true, content, options);
354 $([IPython.events]).trigger('execution_request.Kernel', {kernel: this, content:content});
354 $([IPython.events]).trigger('execution_request.Kernel', {kernel: this, content:content});
355 return this.send_shell_message("execute_request", content, callbacks);
355 return this.send_shell_message("execute_request", content, callbacks);
356 };
356 };
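// Illustrative usage sketch (hypothetical handlers, assumes a started `kernel`):
// run a cell non-silently and watch both the shell reply and the iopub output.
var msg_id = kernel.execute('print(1 + 1)', {
    shell : { reply : function (reply) { console.log(reply.content.status); } },
    iopub : {
        output : function (msg) { console.log(msg.header.msg_type, msg.content); },
        clear_output : function (msg) { console.log('clear requested'); }
    }
}, { silent : false, store_history : true });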
357
357
358 /**
358 /**
359 * When calling this method, pass a function to be called with the `complete_reply` message
359 * When calling this method, pass a function to be called with the `complete_reply` message
360 * as its only argument when it arrives.
360 * as its only argument when it arrives.
361 *
361 *
362 * `complete_reply` is documented
362 * `complete_reply` is documented
363 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#complete)
363 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#complete)
364 *
364 *
365 * @method complete
365 * @method complete
366 * @param line {integer}
366 * @param line {integer}
367 * @param cursor_pos {integer}
367 * @param cursor_pos {integer}
368 * @param callback {function}
368 * @param callback {function}
369 *
369 *
370 */
370 */
371 Kernel.prototype.complete = function (line, cursor_pos, callback) {
371 Kernel.prototype.complete = function (line, cursor_pos, callback) {
372 var callbacks;
372 var callbacks;
373 if (callback) {
373 if (callback) {
374 callbacks = { shell : { reply : callback } };
374 callbacks = { shell : { reply : callback } };
375 }
375 }
376 var content = {
376 var content = {
377 text : '',
377 text : '',
378 line : line,
378 line : line,
379 block : null,
379 block : null,
380 cursor_pos : cursor_pos
380 cursor_pos : cursor_pos
381 };
381 };
382 return this.send_shell_message("complete_request", content, callbacks);
382 return this.send_shell_message("complete_request", content, callbacks);
383 };
383 };
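// Illustrative usage sketch (hypothetical line/cursor values, assumes `kernel`):
kernel.complete('impo', 4, function (reply) {
    console.log(reply.content.matches);   // e.g. ['import']
});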
384
384
385
385
386 Kernel.prototype.interrupt = function () {
386 Kernel.prototype.interrupt = function () {
387 if (this.running) {
387 if (this.running) {
388 $([IPython.events]).trigger('status_interrupting.Kernel', {kernel: this});
388 $([IPython.events]).trigger('status_interrupting.Kernel', {kernel: this});
389 this.post(utils.url_join_encode(this.kernel_url, "interrupt"));
389 this.post(utils.url_join_encode(this.kernel_url, "interrupt"));
390 }
390 }
391 };
391 };
392
392
393
393
394 Kernel.prototype.kill = function () {
394 Kernel.prototype.kill = function () {
395 if (this.running) {
395 if (this.running) {
396 this.running = false;
396 this.running = false;
397 var settings = {
397 var settings = {
398 cache : false,
398 cache : false,
399 type : "DELETE",
399 type : "DELETE",
400 error : utils.log_ajax_error,
400 error : utils.log_ajax_error,
401 };
401 };
402 $.ajax(utils.url_join_encode(this.kernel_url), settings);
402 $.ajax(utils.url_join_encode(this.kernel_url), settings);
403 }
403 }
404 };
404 };
405
405
406 Kernel.prototype.send_input_reply = function (input) {
406 Kernel.prototype.send_input_reply = function (input) {
407 var content = {
407 var content = {
408 value : input,
408 value : input,
409 };
409 };
410 $([IPython.events]).trigger('input_reply.Kernel', {kernel: this, content:content});
410 $([IPython.events]).trigger('input_reply.Kernel', {kernel: this, content:content});
411 var msg = this._get_msg("input_reply", content);
411 var msg = this._get_msg("input_reply", content);
412 this.stdin_channel.send(JSON.stringify(msg));
412 this.stdin_channel.send(JSON.stringify(msg));
413 return msg.header.msg_id;
413 return msg.header.msg_id;
414 };
414 };
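// Illustrative sketch (hypothetical code and value): answering a raw_input prompt
// from the stdin channel by wiring the execute() input callback to send_input_reply;
// defining callbacks.input makes execute() set allow_stdin to true.
kernel.execute('input("name? ")', {
    input : function (request) {
        kernel.send_input_reply('Ada');
    }
});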
415
415
416
416
417 // Reply handlers
417 // Reply handlers
418
418
419 Kernel.prototype.register_iopub_handler = function (msg_type, callback) {
419 Kernel.prototype.register_iopub_handler = function (msg_type, callback) {
420 this._iopub_handlers[msg_type] = callback;
420 this._iopub_handlers[msg_type] = callback;
421 };
421 };
422
422
423 Kernel.prototype.get_iopub_handler = function (msg_type) {
423 Kernel.prototype.get_iopub_handler = function (msg_type) {
424 // get iopub handler for a specific message type
424 // get iopub handler for a specific message type
425 return this._iopub_handlers[msg_type];
425 return this._iopub_handlers[msg_type];
426 };
426 };
427
427
428
428
429 Kernel.prototype.get_callbacks_for_msg = function (msg_id) {
429 Kernel.prototype.get_callbacks_for_msg = function (msg_id) {
430 // get callbacks for a specific message
430 // get callbacks for a specific message
431 if (msg_id == this.last_msg_id) {
431 if (msg_id == this.last_msg_id) {
432 return this.last_msg_callbacks;
432 return this.last_msg_callbacks;
433 } else {
433 } else {
434 return this._msg_callbacks[msg_id];
434 return this._msg_callbacks[msg_id];
435 }
435 }
436 };
436 };
437
437
438
438
439 Kernel.prototype.clear_callbacks_for_msg = function (msg_id) {
439 Kernel.prototype.clear_callbacks_for_msg = function (msg_id) {
440 if (this._msg_callbacks[msg_id] !== undefined ) {
440 if (this._msg_callbacks[msg_id] !== undefined ) {
441 delete this._msg_callbacks[msg_id];
441 delete this._msg_callbacks[msg_id];
442 }
442 }
443 };
443 };
444
444
445 Kernel.prototype._finish_shell = function (msg_id) {
445 Kernel.prototype._finish_shell = function (msg_id) {
446 var callbacks = this._msg_callbacks[msg_id];
446 var callbacks = this._msg_callbacks[msg_id];
447 if (callbacks !== undefined) {
447 if (callbacks !== undefined) {
448 callbacks.shell_done = true;
448 callbacks.shell_done = true;
449 if (callbacks.iopub_done) {
449 if (callbacks.iopub_done) {
450 this.clear_callbacks_for_msg(msg_id);
450 this.clear_callbacks_for_msg(msg_id);
451 }
451 }
452 }
452 }
453 };
453 };
454
454
455 Kernel.prototype._finish_iopub = function (msg_id) {
455 Kernel.prototype._finish_iopub = function (msg_id) {
456 var callbacks = this._msg_callbacks[msg_id];
456 var callbacks = this._msg_callbacks[msg_id];
457 if (callbacks !== undefined) {
457 if (callbacks !== undefined) {
458 callbacks.iopub_done = true;
458 callbacks.iopub_done = true;
459 if (callbacks.shell_done) {
459 if (callbacks.shell_done) {
460 this.clear_callbacks_for_msg(msg_id);
460 this.clear_callbacks_for_msg(msg_id);
461 }
461 }
462 }
462 }
463 };
463 };
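// Illustrative lifecycle sketch (hypothetical msg_id 'abc123'): the intended
// bookkeeping keeps callbacks for a request until both the shell reply and the
// iopub idle status have arrived.
// kernel.set_callbacks_for_msg('abc123', {shell: {...}, iopub: {...}});
// kernel._finish_shell('abc123');  // shell_done = true, entry kept for iopub
// kernel._finish_iopub('abc123');  // iopub_done = true, entry cleared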
464
464
465 /* Set callbacks for a particular message.
465 /* Set callbacks for a particular message.
466 * Callbacks should be a struct of the following form:
466 * Callbacks should be a struct of the following form:
467 * shell : { reply : function, payload : { source_name : function } },
467 * shell : { reply : function, payload : { source_name : function } },
468 * iopub : { output : function, clear_output : function },
468 * iopub : { output : function, clear_output : function },
469 * input : function
469 * input : function
470
470
471 */
471 */
472 Kernel.prototype.set_callbacks_for_msg = function (msg_id, callbacks) {
472 Kernel.prototype.set_callbacks_for_msg = function (msg_id, callbacks) {
473 this.last_msg_id = msg_id;
473 this.last_msg_id = msg_id;
474 if (callbacks) {
474 if (callbacks) {
475 // shallow-copy mapping, because we will modify it at the top level
475 // shallow-copy mapping, because we will modify it at the top level
476 var cbcopy = this._msg_callbacks[msg_id] = this.last_msg_callbacks = {};
476 var cbcopy = this._msg_callbacks[msg_id] = this.last_msg_callbacks = {};
477 cbcopy.shell = callbacks.shell;
477 cbcopy.shell = callbacks.shell;
478 cbcopy.iopub = callbacks.iopub;
478 cbcopy.iopub = callbacks.iopub;
479 cbcopy.input = callbacks.input;
479 cbcopy.input = callbacks.input;
480 cbcopy.shell_done = (!callbacks.shell);
480 cbcopy.shell_done = (!callbacks.shell);
481 cbcopy.iopub_done = (!callbacks.iopub);
481 cbcopy.iopub_done = (!callbacks.iopub);
482 } else {
482 } else {
483 this.last_msg_callbacks = {};
483 this.last_msg_callbacks = {};
484 }
484 }
485 };
485 };
486
486
487
487
488 Kernel.prototype._handle_shell_reply = function (e) {
488 Kernel.prototype._handle_shell_reply = function (e) {
489 var reply = $.parseJSON(e.data);
489 var reply = $.parseJSON(e.data);
490 $([IPython.events]).trigger('shell_reply.Kernel', {kernel: this, reply:reply});
490 $([IPython.events]).trigger('shell_reply.Kernel', {kernel: this, reply:reply});
491 var content = reply.content;
491 var content = reply.content;
492 var metadata = reply.metadata;
492 var metadata = reply.metadata;
493 var parent_id = reply.parent_header.msg_id;
493 var parent_id = reply.parent_header.msg_id;
494 var callbacks = this.get_callbacks_for_msg(parent_id);
494 var callbacks = this.get_callbacks_for_msg(parent_id);
495 if (!callbacks || !callbacks.shell) {
495 if (!callbacks || !callbacks.shell) {
496 return;
496 return;
497 }
497 }
498 var shell_callbacks = callbacks.shell;
498 var shell_callbacks = callbacks.shell;
499
499
500 // signal that shell callbacks are done
500 // signal that shell callbacks are done
501 this._finish_shell(parent_id);
501 this._finish_shell(parent_id);
502
502
503 if (shell_callbacks.reply !== undefined) {
503 if (shell_callbacks.reply !== undefined) {
504 shell_callbacks.reply(reply);
504 shell_callbacks.reply(reply);
505 }
505 }
506 if (content.payload && shell_callbacks.payload) {
506 if (content.payload && shell_callbacks.payload) {
507 this._handle_payloads(content.payload, shell_callbacks.payload, reply);
507 this._handle_payloads(content.payload, shell_callbacks.payload, reply);
508 }
508 }
509 };
509 };
510
510
511
511
512 Kernel.prototype._handle_payloads = function (payloads, payload_callbacks, msg) {
512 Kernel.prototype._handle_payloads = function (payloads, payload_callbacks, msg) {
513 var l = payloads.length;
513 var l = payloads.length;
514 // Payloads are handled by triggering events because we don't want the Kernel
514 // Payloads are handled by triggering events because we don't want the Kernel
515 // to depend on the Notebook or Pager classes.
515 // to depend on the Notebook or Pager classes.
516 for (var i=0; i<l; i++) {
516 for (var i=0; i<l; i++) {
517 var payload = payloads[i];
517 var payload = payloads[i];
518 var callback = payload_callbacks[payload.source];
518 var callback = payload_callbacks[payload.source];
519 if (callback) {
519 if (callback) {
520 callback(payload, msg);
520 callback(payload, msg);
521 }
521 }
522 }
522 }
523 };
523 };
524
524
525 Kernel.prototype._handle_status_message = function (msg) {
525 Kernel.prototype._handle_status_message = function (msg) {
526 var execution_state = msg.content.execution_state;
526 var execution_state = msg.content.execution_state;
527 var parent_id = msg.parent_header.msg_id;
527 var parent_id = msg.parent_header.msg_id;
528
528
529 // dispatch status msg callbacks, if any
529 // dispatch status msg callbacks, if any
530 var callbacks = this.get_callbacks_for_msg(parent_id);
530 var callbacks = this.get_callbacks_for_msg(parent_id);
531 if (callbacks && callbacks.iopub && callbacks.iopub.status) {
531 if (callbacks && callbacks.iopub && callbacks.iopub.status) {
532 try {
532 try {
533 callbacks.iopub.status(msg);
533 callbacks.iopub.status(msg);
534 } catch (e) {
534 } catch (e) {
535 console.log("Exception in status msg handler", e, e.stack);
535 console.log("Exception in status msg handler", e, e.stack);
536 }
536 }
537 }
537 }
538
538
539 if (execution_state === 'busy') {
539 if (execution_state === 'busy') {
540 $([IPython.events]).trigger('status_busy.Kernel', {kernel: this});
540 $([IPython.events]).trigger('status_busy.Kernel', {kernel: this});
541 } else if (execution_state === 'idle') {
541 } else if (execution_state === 'idle') {
542 // signal that iopub callbacks are (probably) done
542 // signal that iopub callbacks are (probably) done
543 // async output may still arrive,
543 // async output may still arrive,
544 // but only for the most recent request
544 // but only for the most recent request
545 this._finish_iopub(parent_id);
545 this._finish_iopub(parent_id);
546
546
547 // trigger status_idle event
547 // trigger status_idle event
548 $([IPython.events]).trigger('status_idle.Kernel', {kernel: this});
548 $([IPython.events]).trigger('status_idle.Kernel', {kernel: this});
549 } else if (execution_state === 'restarting') {
549 } else if (execution_state === 'restarting') {
550 // autorestarting is distinct from restarting,
550 // autorestarting is distinct from restarting,
551 // in that it means the kernel died and the server is restarting it.
551 // in that it means the kernel died and the server is restarting it.
552 // status_restarting sets the notification widget,
552 // status_restarting sets the notification widget,
553 // autorestart shows the more prominent dialog.
553 // autorestart shows the more prominent dialog.
554 $([IPython.events]).trigger('status_autorestarting.Kernel', {kernel: this});
554 $([IPython.events]).trigger('status_autorestarting.Kernel', {kernel: this});
555 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
555 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
556 } else if (execution_state === 'dead') {
556 } else if (execution_state === 'dead') {
557 this.stop_channels();
557 this.stop_channels();
558 $([IPython.events]).trigger('status_dead.Kernel', {kernel: this});
558 $([IPython.events]).trigger('status_dead.Kernel', {kernel: this});
559 }
559 }
560 };
560 };
561
561
562
562
563 // handle clear_output message
563 // handle clear_output message
564 Kernel.prototype._handle_clear_output = function (msg) {
564 Kernel.prototype._handle_clear_output = function (msg) {
565 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
565 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
566 if (!callbacks || !callbacks.iopub) {
566 if (!callbacks || !callbacks.iopub) {
567 return;
567 return;
568 }
568 }
569 var callback = callbacks.iopub.clear_output;
569 var callback = callbacks.iopub.clear_output;
570 if (callback) {
570 if (callback) {
571 callback(msg);
571 callback(msg);
572 }
572 }
573 };
573 };
574
574
575
575
576 // handle an output message (execute_result, display_data, etc.)
576 // handle an output message (execute_result, display_data, etc.)
577 Kernel.prototype._handle_output_message = function (msg) {
577 Kernel.prototype._handle_output_message = function (msg) {
578 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
578 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
579 if (!callbacks || !callbacks.iopub) {
579 if (!callbacks || !callbacks.iopub) {
580 return;
580 return;
581 }
581 }
582 var callback = callbacks.iopub.output;
582 var callback = callbacks.iopub.output;
583 if (callback) {
583 if (callback) {
584 callback(msg);
584 callback(msg);
585 }
585 }
586 };
586 };
587
587
588 // dispatch IOPub messages to respective handlers.
588 // dispatch IOPub messages to respective handlers.
589 // each message type should have a handler.
589 // each message type should have a handler.
590 Kernel.prototype._handle_iopub_message = function (e) {
590 Kernel.prototype._handle_iopub_message = function (e) {
591 var msg = $.parseJSON(e.data);
591 var msg = $.parseJSON(e.data);
592
592
593 var handler = this.get_iopub_handler(msg.header.msg_type);
593 var handler = this.get_iopub_handler(msg.header.msg_type);
594 if (handler !== undefined) {
594 if (handler !== undefined) {
595 handler(msg);
595 handler(msg);
596 }
596 }
597 };
597 };
598
598
599
599
600 Kernel.prototype._handle_input_request = function (e) {
600 Kernel.prototype._handle_input_request = function (e) {
601 var request = $.parseJSON(e.data);
601 var request = $.parseJSON(e.data);
602 var header = request.header;
602 var header = request.header;
603 var content = request.content;
603 var content = request.content;
604 var metadata = request.metadata;
604 var metadata = request.metadata;
605 var msg_type = header.msg_type;
605 var msg_type = header.msg_type;
606 if (msg_type !== 'input_request') {
606 if (msg_type !== 'input_request') {
607 console.log("Invalid input request!", request);
607 console.log("Invalid input request!", request);
608 return;
608 return;
609 }
609 }
610 var callbacks = this.get_callbacks_for_msg(request.parent_header.msg_id);
610 var callbacks = this.get_callbacks_for_msg(request.parent_header.msg_id);
611 if (callbacks) {
611 if (callbacks) {
612 if (callbacks.input) {
612 if (callbacks.input) {
613 callbacks.input(request);
613 callbacks.input(request);
614 }
614 }
615 }
615 }
616 };
616 };
617
617
618
618
619 IPython.Kernel = Kernel;
619 IPython.Kernel = Kernel;
620
620
621 return IPython;
621 return IPython;
622
622
623 }(IPython));
623 }(IPython));
624
624
@@ -1,442 +1,442 b''
1 """Test suite for our zeromq-based message specification."""
1 """Test suite for our zeromq-based message specification."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import re
6 import re
7 from distutils.version import LooseVersion as V
7 from distutils.version import LooseVersion as V
8 from subprocess import PIPE
8 from subprocess import PIPE
9 try:
9 try:
10 from queue import Empty # Py 3
10 from queue import Empty # Py 3
11 except ImportError:
11 except ImportError:
12 from Queue import Empty # Py 2
12 from Queue import Empty # Py 2
13
13
14 import nose.tools as nt
14 import nose.tools as nt
15
15
16 from IPython.kernel import KernelManager
16 from IPython.kernel import KernelManager
17
17
18 from IPython.utils.traitlets import (
18 from IPython.utils.traitlets import (
19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
20 )
20 )
21 from IPython.utils.py3compat import string_types, iteritems
21 from IPython.utils.py3compat import string_types, iteritems
22
22
23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
24
24
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26 # Globals
26 # Globals
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28 KC = None
28 KC = None
29
29
30 def setup():
30 def setup():
31 global KC
31 global KC
32 KC = start_global_kernel()
32 KC = start_global_kernel()
33
33
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35 # Message Spec References
35 # Message Spec References
36 #-----------------------------------------------------------------------------
36 #-----------------------------------------------------------------------------
37
37
38 class Reference(HasTraits):
38 class Reference(HasTraits):
39
39
40 """
40 """
41 Base class for message specification testing.
41 Base class for message specification testing.
42
42
43 This class is the core of the message specification test. The
43 This class is the core of the message specification test. The
44 idea is that child classes implement trait attributes for each
44 idea is that child classes implement trait attributes for each
45 message key, so that message keys can be tested against these
45 message key, so that message keys can be tested against these
46 traits using the :meth:`check` method.
46 traits using the :meth:`check` method.
47
47
48 """
48 """
49
49
50 def check(self, d):
50 def check(self, d):
51 """validate a dict against our traits"""
51 """validate a dict against our traits"""
52 for key in self.trait_names():
52 for key in self.trait_names():
53 nt.assert_in(key, d)
53 nt.assert_in(key, d)
54 # FIXME: always allow None, probably not a good idea
54 # FIXME: always allow None, probably not a good idea
55 if d[key] is None:
55 if d[key] is None:
56 continue
56 continue
57 try:
57 try:
58 setattr(self, key, d[key])
58 setattr(self, key, d[key])
59 except TraitError as e:
59 except TraitError as e:
60 assert False, str(e)
60 assert False, str(e)
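# Illustrative sketch (hypothetical reference class, not part of the suite):
# subclasses only declare traits; check() asserts each trait name is present
# in the dict and that its value validates against the trait.
#
# class PingReply(Reference):
#     status = Unicode()
#
# PingReply().check({'status': u'ok'})   # passes
# PingReply().check({'status': 42})      # fails: TraitError -> assert False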
61
61
62 class Version(Unicode):
62 class Version(Unicode):
63 def validate(self, obj, value):
63 def validate(self, obj, value):
64 min_version = self.default_value
64 min_version = self.default_value
65 if V(value) < V(min_version):
65 if V(value) < V(min_version):
66 raise TraitError("bad version: %s < %s" % (value, min_version))
66 raise TraitError("bad version: %s < %s" % (value, min_version))
67
67
68 class RMessage(Reference):
68 class RMessage(Reference):
69 msg_id = Unicode()
69 msg_id = Unicode()
70 msg_type = Unicode()
70 msg_type = Unicode()
71 header = Dict()
71 header = Dict()
72 parent_header = Dict()
72 parent_header = Dict()
73 content = Dict()
73 content = Dict()
74
74
75 def check(self, d):
75 def check(self, d):
76 super(RMessage, self).check(d)
76 super(RMessage, self).check(d)
77 RHeader().check(self.header)
77 RHeader().check(self.header)
78 RHeader().check(self.parent_header)
78 RHeader().check(self.parent_header)
79
79
80 class RHeader(Reference):
80 class RHeader(Reference):
81 msg_id = Unicode()
81 msg_id = Unicode()
82 msg_type = Unicode()
82 msg_type = Unicode()
83 session = Unicode()
83 session = Unicode()
84 username = Unicode()
84 username = Unicode()
85 version = Version('5.0')
85 version = Version('5.0')
86
86
87
87
88 class ExecuteReply(Reference):
88 class ExecuteReply(Reference):
89 execution_count = Integer()
89 execution_count = Integer()
90 status = Enum((u'ok', u'error'))
90 status = Enum((u'ok', u'error'))
91
91
92 def check(self, d):
92 def check(self, d):
93 Reference.check(self, d)
93 Reference.check(self, d)
94 if d['status'] == 'ok':
94 if d['status'] == 'ok':
95 ExecuteReplyOkay().check(d)
95 ExecuteReplyOkay().check(d)
96 elif d['status'] == 'error':
96 elif d['status'] == 'error':
97 ExecuteReplyError().check(d)
97 ExecuteReplyError().check(d)
98
98
99
99
100 class ExecuteReplyOkay(Reference):
100 class ExecuteReplyOkay(Reference):
101 payload = List(Dict)
101 payload = List(Dict)
102 user_variables = Dict()
102 user_variables = Dict()
103 user_expressions = Dict()
103 user_expressions = Dict()
104
104
105
105
106 class ExecuteReplyError(Reference):
106 class ExecuteReplyError(Reference):
107 ename = Unicode()
107 ename = Unicode()
108 evalue = Unicode()
108 evalue = Unicode()
109 traceback = List(Unicode)
109 traceback = List(Unicode)
110
110
111
111
112 class OInfoReply(Reference):
112 class OInfoReply(Reference):
113 name = Unicode()
113 name = Unicode()
114 found = Bool()
114 found = Bool()
115 ismagic = Bool()
115 ismagic = Bool()
116 isalias = Bool()
116 isalias = Bool()
117 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
117 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
118 type_name = Unicode()
118 type_name = Unicode()
119 string_form = Unicode()
119 string_form = Unicode()
120 base_class = Unicode()
120 base_class = Unicode()
121 length = Integer()
121 length = Integer()
122 file = Unicode()
122 file = Unicode()
123 definition = Unicode()
123 definition = Unicode()
124 argspec = Dict()
124 argspec = Dict()
125 init_definition = Unicode()
125 init_definition = Unicode()
126 docstring = Unicode()
126 docstring = Unicode()
127 init_docstring = Unicode()
127 init_docstring = Unicode()
128 class_docstring = Unicode()
128 class_docstring = Unicode()
129 call_def = Unicode()
129 call_def = Unicode()
130 call_docstring = Unicode()
130 call_docstring = Unicode()
131 source = Unicode()
131 source = Unicode()
132
132
133 def check(self, d):
133 def check(self, d):
134 super(OInfoReply, self).check(d)
134 super(OInfoReply, self).check(d)
135 if d['argspec'] is not None:
135 if d['argspec'] is not None:
136 ArgSpec().check(d['argspec'])
136 ArgSpec().check(d['argspec'])
137
137
138
138
139 class ArgSpec(Reference):
139 class ArgSpec(Reference):
140 args = List(Unicode)
140 args = List(Unicode)
141 varargs = Unicode()
141 varargs = Unicode()
142 varkw = Unicode()
142 varkw = Unicode()
143 defaults = List()
143 defaults = List()
144
144
145
145
146 class Status(Reference):
146 class Status(Reference):
147 execution_state = Enum((u'busy', u'idle', u'starting'))
147 execution_state = Enum((u'busy', u'idle', u'starting'))
148
148
149
149
150 class CompleteReply(Reference):
150 class CompleteReply(Reference):
151 matches = List(Unicode)
151 matches = List(Unicode)
152
152
153
153
154 class KernelInfoReply(Reference):
154 class KernelInfoReply(Reference):
155 protocol_version = Version('5.0')
155 protocol_version = Version('5.0')
156 ipython_version = Version('2.0')
156 ipython_version = Version('2.0')
157 language_version = Version('2.7')
157 language_version = Version('2.7')
158 language = Unicode()
158 language = Unicode()
159
159
160
160
161 # IOPub messages
161 # IOPub messages
162
162
163 class ExecuteInput(Reference):
163 class ExecuteInput(Reference):
164 code = Unicode()
164 code = Unicode()
165 execution_count = Integer()
165 execution_count = Integer()
166
166
167
167
168 PyErr = ExecuteReplyError
168 Error = ExecuteReplyError
169
169
170
170
171 class Stream(Reference):
171 class Stream(Reference):
172 name = Enum((u'stdout', u'stderr'))
172 name = Enum((u'stdout', u'stderr'))
173 data = Unicode()
173 data = Unicode()
174
174
175
175
176 mime_pat = re.compile(r'\w+/\w+')
176 mime_pat = re.compile(r'\w+/\w+')
177
177
178 class DisplayData(Reference):
178 class DisplayData(Reference):
179 source = Unicode()
179 source = Unicode()
180 metadata = Dict()
180 metadata = Dict()
181 data = Dict()
181 data = Dict()
182 def _data_changed(self, name, old, new):
182 def _data_changed(self, name, old, new):
183 for k,v in iteritems(new):
183 for k,v in iteritems(new):
184 assert mime_pat.match(k)
184 assert mime_pat.match(k)
185 nt.assert_is_instance(v, string_types)
185 nt.assert_is_instance(v, string_types)
186
186
187
187
188 class ExecuteResult(Reference):
188 class ExecuteResult(Reference):
189 execution_count = Integer()
189 execution_count = Integer()
190 data = Dict()
190 data = Dict()
191 def _data_changed(self, name, old, new):
191 def _data_changed(self, name, old, new):
192 for k,v in iteritems(new):
192 for k,v in iteritems(new):
193 assert mime_pat.match(k)
193 assert mime_pat.match(k)
194 nt.assert_is_instance(v, string_types)
194 nt.assert_is_instance(v, string_types)
195
195
196
196
197 references = {
197 references = {
198 'execute_reply' : ExecuteReply(),
198 'execute_reply' : ExecuteReply(),
199 'object_info_reply' : OInfoReply(),
199 'object_info_reply' : OInfoReply(),
200 'status' : Status(),
200 'status' : Status(),
201 'complete_reply' : CompleteReply(),
201 'complete_reply' : CompleteReply(),
202 'kernel_info_reply': KernelInfoReply(),
202 'kernel_info_reply': KernelInfoReply(),
203 'execute_input' : ExecuteInput(),
203 'execute_input' : ExecuteInput(),
204 'execute_result' : ExecuteResult(),
204 'execute_result' : ExecuteResult(),
205 'pyerr' : PyErr(),
205 'error' : Error(),
206 'stream' : Stream(),
206 'stream' : Stream(),
207 'display_data' : DisplayData(),
207 'display_data' : DisplayData(),
208 'header' : RHeader(),
208 'header' : RHeader(),
209 }
209 }
210 """
210 """
211 Specifications of `content` part of the reply messages.
211 Specifications of `content` part of the reply messages.
212 """
212 """
213
213
214
214
215 def validate_message(msg, msg_type=None, parent=None):
215 def validate_message(msg, msg_type=None, parent=None):
216 """validate a message
216 """validate a message
217
217
218 The envelope is checked with RMessage and the content is checked
218 The envelope is checked with RMessage and the content is checked
219 against the reference spec for the message type.
219 against the reference spec for the message type.
220
220
221 If msg_type and/or parent are given, the msg_type and/or parent msg_id
221 If msg_type and/or parent are given, the msg_type and/or parent msg_id
222 are compared with the given values.
222 are compared with the given values.
223 """
223 """
224 RMessage().check(msg)
224 RMessage().check(msg)
225 if msg_type:
225 if msg_type:
226 nt.assert_equal(msg['msg_type'], msg_type)
226 nt.assert_equal(msg['msg_type'], msg_type)
227 if parent:
227 if parent:
228 nt.assert_equal(msg['parent_header']['msg_id'], parent)
228 nt.assert_equal(msg['parent_header']['msg_id'], parent)
229 content = msg['content']
229 content = msg['content']
230 ref = references[msg['msg_type']]
230 ref = references[msg['msg_type']]
231 ref.check(content)
231 ref.check(content)
232
232
233
233
234 #-----------------------------------------------------------------------------
234 #-----------------------------------------------------------------------------
235 # Tests
235 # Tests
236 #-----------------------------------------------------------------------------
236 #-----------------------------------------------------------------------------
237
237
238 # Shell channel
238 # Shell channel
239
239
240 def test_execute():
240 def test_execute():
241 flush_channels()
241 flush_channels()
242
242
243 msg_id = KC.execute(code='x=1')
243 msg_id = KC.execute(code='x=1')
244 reply = KC.get_shell_msg(timeout=TIMEOUT)
244 reply = KC.get_shell_msg(timeout=TIMEOUT)
245 validate_message(reply, 'execute_reply', msg_id)
245 validate_message(reply, 'execute_reply', msg_id)
246
246
247
247
248 def test_execute_silent():
248 def test_execute_silent():
249 flush_channels()
249 flush_channels()
250 msg_id, reply = execute(code='x=1', silent=True)
250 msg_id, reply = execute(code='x=1', silent=True)
251
251
252 # flush status=idle
252 # flush status=idle
253 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
253 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
254 validate_message(status, 'status', msg_id)
254 validate_message(status, 'status', msg_id)
255 nt.assert_equal(status['content']['execution_state'], 'idle')
255 nt.assert_equal(status['content']['execution_state'], 'idle')
256
256
257 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
257 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
258 count = reply['execution_count']
258 count = reply['execution_count']
259
259
260 msg_id, reply = execute(code='x=2', silent=True)
260 msg_id, reply = execute(code='x=2', silent=True)
261
261
262 # flush status=idle
262 # flush status=idle
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 validate_message(status, 'status', msg_id)
264 validate_message(status, 'status', msg_id)
265 nt.assert_equal(status['content']['execution_state'], 'idle')
265 nt.assert_equal(status['content']['execution_state'], 'idle')
266
266
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 count_2 = reply['execution_count']
268 count_2 = reply['execution_count']
269 nt.assert_equal(count_2, count)
269 nt.assert_equal(count_2, count)
270
270
271
271
272 def test_execute_error():
272 def test_execute_error():
273 flush_channels()
273 flush_channels()
274
274
275 msg_id, reply = execute(code='1/0')
275 msg_id, reply = execute(code='1/0')
276 nt.assert_equal(reply['status'], 'error')
276 nt.assert_equal(reply['status'], 'error')
277 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
277 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
278
278
279 pyerr = KC.iopub_channel.get_msg(timeout=TIMEOUT)
279 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
280 validate_message(pyerr, 'pyerr', msg_id)
280 validate_message(error, 'error', msg_id)
281
281
282
282
283 def test_execute_inc():
283 def test_execute_inc():
284 """execute request should increment execution_count"""
284 """execute request should increment execution_count"""
285 flush_channels()
285 flush_channels()
286
286
287 msg_id, reply = execute(code='x=1')
287 msg_id, reply = execute(code='x=1')
288 count = reply['execution_count']
288 count = reply['execution_count']
289
289
290 flush_channels()
290 flush_channels()
291
291
292 msg_id, reply = execute(code='x=2')
292 msg_id, reply = execute(code='x=2')
293 count_2 = reply['execution_count']
293 count_2 = reply['execution_count']
294 nt.assert_equal(count_2, count+1)
294 nt.assert_equal(count_2, count+1)
295
295
296
296
297 def test_user_variables():
297 def test_user_variables():
298 flush_channels()
298 flush_channels()
299
299
300 msg_id, reply = execute(code='x=1', user_variables=['x'])
300 msg_id, reply = execute(code='x=1', user_variables=['x'])
301 user_variables = reply['user_variables']
301 user_variables = reply['user_variables']
302 nt.assert_equal(user_variables, {u'x': {
302 nt.assert_equal(user_variables, {u'x': {
303 u'status': u'ok',
303 u'status': u'ok',
304 u'data': {u'text/plain': u'1'},
304 u'data': {u'text/plain': u'1'},
305 u'metadata': {},
305 u'metadata': {},
306 }})
306 }})
307
307
308
308
309 def test_user_variables_fail():
309 def test_user_variables_fail():
310 flush_channels()
310 flush_channels()
311
311
312 msg_id, reply = execute(code='x=1', user_variables=['nosuchname'])
312 msg_id, reply = execute(code='x=1', user_variables=['nosuchname'])
313 user_variables = reply['user_variables']
313 user_variables = reply['user_variables']
314 foo = user_variables['nosuchname']
314 foo = user_variables['nosuchname']
315 nt.assert_equal(foo['status'], 'error')
315 nt.assert_equal(foo['status'], 'error')
316 nt.assert_equal(foo['ename'], 'KeyError')
316 nt.assert_equal(foo['ename'], 'KeyError')
317
317
318
318
319 def test_user_expressions():
319 def test_user_expressions():
320 flush_channels()
320 flush_channels()
321
321
322 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
322 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
323 user_expressions = reply['user_expressions']
323 user_expressions = reply['user_expressions']
324 nt.assert_equal(user_expressions, {u'foo': {
324 nt.assert_equal(user_expressions, {u'foo': {
325 u'status': u'ok',
325 u'status': u'ok',
326 u'data': {u'text/plain': u'2'},
326 u'data': {u'text/plain': u'2'},
327 u'metadata': {},
327 u'metadata': {},
328 }})
328 }})
329
329
330
330
331 def test_user_expressions_fail():
331 def test_user_expressions_fail():
332 flush_channels()
332 flush_channels()
333
333
334 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
334 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
335 user_expressions = reply['user_expressions']
335 user_expressions = reply['user_expressions']
336 foo = user_expressions['foo']
336 foo = user_expressions['foo']
337 nt.assert_equal(foo['status'], 'error')
337 nt.assert_equal(foo['status'], 'error')
338 nt.assert_equal(foo['ename'], 'NameError')
338 nt.assert_equal(foo['ename'], 'NameError')
339
339
340
340
341 def test_oinfo():
341 def test_oinfo():
342 flush_channels()
342 flush_channels()
343
343
344 msg_id = KC.object_info('a')
344 msg_id = KC.object_info('a')
345 reply = KC.get_shell_msg(timeout=TIMEOUT)
345 reply = KC.get_shell_msg(timeout=TIMEOUT)
346 validate_message(reply, 'object_info_reply', msg_id)
346 validate_message(reply, 'object_info_reply', msg_id)
347
347
348
348
349 def test_oinfo_found():
349 def test_oinfo_found():
350 flush_channels()
350 flush_channels()
351
351
352 msg_id, reply = execute(code='a=5')
352 msg_id, reply = execute(code='a=5')
353
353
354 msg_id = KC.object_info('a')
354 msg_id = KC.object_info('a')
355 reply = KC.get_shell_msg(timeout=TIMEOUT)
355 reply = KC.get_shell_msg(timeout=TIMEOUT)
356 validate_message(reply, 'object_info_reply', msg_id)
356 validate_message(reply, 'object_info_reply', msg_id)
357 content = reply['content']
357 content = reply['content']
358 assert content['found']
358 assert content['found']
359 argspec = content['argspec']
359 argspec = content['argspec']
360 nt.assert_is(argspec, None)
360 nt.assert_is(argspec, None)
361
361
362
362
363 def test_oinfo_detail():
363 def test_oinfo_detail():
364 flush_channels()
364 flush_channels()
365
365
366 msg_id, reply = execute(code='ip=get_ipython()')
366 msg_id, reply = execute(code='ip=get_ipython()')
367
367
368 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
368 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
369 reply = KC.get_shell_msg(timeout=TIMEOUT)
369 reply = KC.get_shell_msg(timeout=TIMEOUT)
370 validate_message(reply, 'object_info_reply', msg_id)
370 validate_message(reply, 'object_info_reply', msg_id)
371 content = reply['content']
371 content = reply['content']
372 assert content['found']
372 assert content['found']
373 argspec = content['argspec']
373 argspec = content['argspec']
374 nt.assert_is_instance(argspec, dict, "expected non-empty argspec dict, got %r" % argspec)
374 nt.assert_is_instance(argspec, dict, "expected non-empty argspec dict, got %r" % argspec)
375 nt.assert_equal(argspec['defaults'], [0])
375 nt.assert_equal(argspec['defaults'], [0])
376
376
377
377
378 def test_oinfo_not_found():
378 def test_oinfo_not_found():
379 flush_channels()
379 flush_channels()
380
380
381 msg_id = KC.object_info('dne')
381 msg_id = KC.object_info('dne')
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
382 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 validate_message(reply, 'object_info_reply', msg_id)
383 validate_message(reply, 'object_info_reply', msg_id)
384 content = reply['content']
384 content = reply['content']
385 nt.assert_false(content['found'])
385 nt.assert_false(content['found'])
386
386
387
387
388 def test_complete():
388 def test_complete():
389 flush_channels()
389 flush_channels()
390
390
391 msg_id, reply = execute(code="alpha = albert = 5")
391 msg_id, reply = execute(code="alpha = albert = 5")
392
392
393 msg_id = KC.complete('al', 'al', 2)
393 msg_id = KC.complete('al', 'al', 2)
394 reply = KC.get_shell_msg(timeout=TIMEOUT)
394 reply = KC.get_shell_msg(timeout=TIMEOUT)
395 validate_message(reply, 'complete_reply', msg_id)
395 validate_message(reply, 'complete_reply', msg_id)
396 matches = reply['content']['matches']
396 matches = reply['content']['matches']
397 for name in ('alpha', 'albert'):
397 for name in ('alpha', 'albert'):
398 nt.assert_in(name, matches)
398 nt.assert_in(name, matches)
399
399
400
400
401 def test_kernel_info_request():
401 def test_kernel_info_request():
402 flush_channels()
402 flush_channels()
403
403
404 msg_id = KC.kernel_info()
404 msg_id = KC.kernel_info()
405 reply = KC.get_shell_msg(timeout=TIMEOUT)
405 reply = KC.get_shell_msg(timeout=TIMEOUT)
406 validate_message(reply, 'kernel_info_reply', msg_id)
406 validate_message(reply, 'kernel_info_reply', msg_id)
407
407
408
408
409 def test_single_payload():
409 def test_single_payload():
410 flush_channels()
410 flush_channels()
411 msg_id, reply = execute(code="for i in range(3):\n"+
411 msg_id, reply = execute(code="for i in range(3):\n"+
412 " x=range?\n")
412 " x=range?\n")
413 payload = reply['payload']
413 payload = reply['payload']
414 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
414 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
415 nt.assert_equal(len(next_input_pls), 1)
415 nt.assert_equal(len(next_input_pls), 1)
416
416
417
417
418 # IOPub channel
418 # IOPub channel
419
419
420
420
421 def test_stream():
421 def test_stream():
422 flush_channels()
422 flush_channels()
423
423
424 msg_id, reply = execute("print('hi')")
424 msg_id, reply = execute("print('hi')")
425
425
426 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
426 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
427 validate_message(stdout, 'stream', msg_id)
427 validate_message(stdout, 'stream', msg_id)
428 content = stdout['content']
428 content = stdout['content']
429 nt.assert_equal(content['name'], u'stdout')
429 nt.assert_equal(content['name'], u'stdout')
430 nt.assert_equal(content['data'], u'hi\n')
430 nt.assert_equal(content['data'], u'hi\n')
431
431
432
432
433 def test_display_data():
433 def test_display_data():
434 flush_channels()
434 flush_channels()
435
435
436 msg_id, reply = execute("from IPython.core.display import display; display(1)")
436 msg_id, reply = execute("from IPython.core.display import display; display(1)")
437
437
438 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
438 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
439 validate_message(display, 'display_data', parent=msg_id)
439 validate_message(display, 'display_data', parent=msg_id)
440 data = display['content']['data']
440 data = display['content']['data']
441 nt.assert_equal(data['text/plain'], u'1')
441 nt.assert_equal(data['text/plain'], u'1')
442
442
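The renamed IOPub traceback message exercised above ('pyerr' becoming 'error') can be checked from any client the same way the test does. Below is a minimal, hedged sketch against a blocking client kc with the same interface as the KC helper used in these tests (execute, get_shell_msg, iopub_channel.get_msg); it is an illustration, not part of the changeset.

    def check_error_message(kc, timeout=10):
        """Run failing code and confirm the kernel publishes an 'error' IOPub message."""
        msg_id = kc.execute('1/0')
        reply = kc.get_shell_msg(timeout=timeout)           # execute_reply
        assert reply['content']['status'] == 'error'
        assert reply['content']['ename'] == 'ZeroDivisionError'
        # drain IOPub until the traceback message for this execution arrives
        while True:
            msg = kc.iopub_channel.get_msg(timeout=timeout)
            if msg['parent_header'].get('msg_id') != msg_id:
                continue
            if msg['header']['msg_type'] == 'error':        # was 'pyerr' before this change
                return msg['content']['ename'], msg['content']['traceback']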
@@ -1,797 +1,797 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """An interactive kernel that talks to frontends over 0MQ."""
2 """An interactive kernel that talks to frontends over 0MQ."""
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 from __future__ import print_function
7 from __future__ import print_function
8
8
9 import sys
9 import sys
10 import time
10 import time
11 import traceback
11 import traceback
12 import logging
12 import logging
13 import uuid
13 import uuid
14
14
15 from datetime import datetime
15 from datetime import datetime
16 from signal import (
16 from signal import (
17 signal, default_int_handler, SIGINT
17 signal, default_int_handler, SIGINT
18 )
18 )
19
19
20 import zmq
20 import zmq
21 from zmq.eventloop import ioloop
21 from zmq.eventloop import ioloop
22 from zmq.eventloop.zmqstream import ZMQStream
22 from zmq.eventloop.zmqstream import ZMQStream
23
23
24 from IPython.config.configurable import Configurable
24 from IPython.config.configurable import Configurable
25 from IPython.core.error import StdinNotImplementedError
25 from IPython.core.error import StdinNotImplementedError
26 from IPython.core import release
26 from IPython.core import release
27 from IPython.utils import py3compat
27 from IPython.utils import py3compat
28 from IPython.utils.py3compat import builtin_mod, unicode_type, string_types
28 from IPython.utils.py3compat import builtin_mod, unicode_type, string_types
29 from IPython.utils.jsonutil import json_clean
29 from IPython.utils.jsonutil import json_clean
30 from IPython.utils.traitlets import (
30 from IPython.utils.traitlets import (
31 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
31 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
32 Type, Bool,
32 Type, Bool,
33 )
33 )
34
34
35 from .serialize import serialize_object, unpack_apply_message
35 from .serialize import serialize_object, unpack_apply_message
36 from .session import Session
36 from .session import Session
37 from .zmqshell import ZMQInteractiveShell
37 from .zmqshell import ZMQInteractiveShell
38
38
39
39
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41 # Main kernel class
41 # Main kernel class
42 #-----------------------------------------------------------------------------
42 #-----------------------------------------------------------------------------
43
43
44 protocol_version = release.kernel_protocol_version
44 protocol_version = release.kernel_protocol_version
45 ipython_version = release.version
45 ipython_version = release.version
46 language_version = sys.version.split()[0]
46 language_version = sys.version.split()[0]
47
47
48
48
49 class Kernel(Configurable):
49 class Kernel(Configurable):
50
50
51 #---------------------------------------------------------------------------
51 #---------------------------------------------------------------------------
52 # Kernel interface
52 # Kernel interface
53 #---------------------------------------------------------------------------
53 #---------------------------------------------------------------------------
54
54
55 # attribute to override with a GUI
55 # attribute to override with a GUI
56 eventloop = Any(None)
56 eventloop = Any(None)
57 def _eventloop_changed(self, name, old, new):
57 def _eventloop_changed(self, name, old, new):
58 """schedule call to eventloop from IOLoop"""
58 """schedule call to eventloop from IOLoop"""
59 loop = ioloop.IOLoop.instance()
59 loop = ioloop.IOLoop.instance()
60 loop.add_callback(self.enter_eventloop)
60 loop.add_callback(self.enter_eventloop)
61
61
62 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
62 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
63 shell_class = Type(ZMQInteractiveShell)
63 shell_class = Type(ZMQInteractiveShell)
64
64
65 session = Instance(Session)
65 session = Instance(Session)
66 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
66 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
67 shell_streams = List()
67 shell_streams = List()
68 control_stream = Instance(ZMQStream)
68 control_stream = Instance(ZMQStream)
69 iopub_socket = Instance(zmq.Socket)
69 iopub_socket = Instance(zmq.Socket)
70 stdin_socket = Instance(zmq.Socket)
70 stdin_socket = Instance(zmq.Socket)
71 log = Instance(logging.Logger)
71 log = Instance(logging.Logger)
72
72
73 user_module = Any()
73 user_module = Any()
74 def _user_module_changed(self, name, old, new):
74 def _user_module_changed(self, name, old, new):
75 if self.shell is not None:
75 if self.shell is not None:
76 self.shell.user_module = new
76 self.shell.user_module = new
77
77
78 user_ns = Instance(dict, args=None, allow_none=True)
78 user_ns = Instance(dict, args=None, allow_none=True)
79 def _user_ns_changed(self, name, old, new):
79 def _user_ns_changed(self, name, old, new):
80 if self.shell is not None:
80 if self.shell is not None:
81 self.shell.user_ns = new
81 self.shell.user_ns = new
82 self.shell.init_user_ns()
82 self.shell.init_user_ns()
83
83
84 # identities:
84 # identities:
85 int_id = Integer(-1)
85 int_id = Integer(-1)
86 ident = Unicode()
86 ident = Unicode()
87
87
88 def _ident_default(self):
88 def _ident_default(self):
89 return unicode_type(uuid.uuid4())
89 return unicode_type(uuid.uuid4())
90
90
91 # Private interface
91 # Private interface
92
92
93 _darwin_app_nap = Bool(True, config=True,
93 _darwin_app_nap = Bool(True, config=True,
94 help="""Whether to use appnope for compatibility with OS X App Nap.
94 help="""Whether to use appnope for compatibility with OS X App Nap.
95
95
96 Only affects OS X >= 10.9.
96 Only affects OS X >= 10.9.
97 """
97 """
98 )
98 )
99
99
100 # Time to sleep after flushing the stdout/err buffers in each execute
100 # Time to sleep after flushing the stdout/err buffers in each execute
101 # cycle. While this introduces a hard limit on the minimal latency of the
101 # cycle. While this introduces a hard limit on the minimal latency of the
102 # execute cycle, it helps prevent output synchronization problems for
102 # execute cycle, it helps prevent output synchronization problems for
103 # clients.
103 # clients.
104 # Units are in seconds. The minimum zmq latency on local host is probably
104 # Units are in seconds. The minimum zmq latency on local host is probably
105 # ~150 microseconds, set this to 500us for now. We may need to increase it
105 # ~150 microseconds, set this to 500us for now. We may need to increase it
106 # a little if it's not enough after more interactive testing.
106 # a little if it's not enough after more interactive testing.
107 _execute_sleep = Float(0.0005, config=True)
107 _execute_sleep = Float(0.0005, config=True)
108
108
109 # Frequency of the kernel's event loop.
109 # Frequency of the kernel's event loop.
110 # Units are in seconds; kernel subclasses for GUI toolkits may need to
110 # Units are in seconds; kernel subclasses for GUI toolkits may need to
111 # adapt to milliseconds.
111 # adapt to milliseconds.
112 _poll_interval = Float(0.05, config=True)
112 _poll_interval = Float(0.05, config=True)
113
113
114 # If the shutdown was requested over the network, we leave here the
114 # If the shutdown was requested over the network, we leave here the
115 # necessary reply message so it can be sent by our registered atexit
115 # necessary reply message so it can be sent by our registered atexit
116 # handler. This ensures that the reply is only sent to clients truly at
116 # handler. This ensures that the reply is only sent to clients truly at
117 # the end of our shutdown process (which happens after the underlying
117 # the end of our shutdown process (which happens after the underlying
118 # IPython shell's own shutdown).
118 # IPython shell's own shutdown).
119 _shutdown_message = None
119 _shutdown_message = None
120
120
121 # This is a dict of the port numbers that the kernel is listening on. It is set
121 # This is a dict of the port numbers that the kernel is listening on. It is set
122 # by record_ports and used by connect_request.
122 # by record_ports and used by connect_request.
123 _recorded_ports = Dict()
123 _recorded_ports = Dict()
124
124
125 # A reference to the Python builtin 'raw_input' function.
125 # A reference to the Python builtin 'raw_input' function.
126 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
126 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
127 _sys_raw_input = Any()
127 _sys_raw_input = Any()
128 _sys_eval_input = Any()
128 _sys_eval_input = Any()
129
129
130 # set of aborted msg_ids
130 # set of aborted msg_ids
131 aborted = Set()
131 aborted = Set()
132
132
133
133
134 def __init__(self, **kwargs):
134 def __init__(self, **kwargs):
135 super(Kernel, self).__init__(**kwargs)
135 super(Kernel, self).__init__(**kwargs)
136
136
137 # Initialize the InteractiveShell subclass
137 # Initialize the InteractiveShell subclass
138 self.shell = self.shell_class.instance(parent=self,
138 self.shell = self.shell_class.instance(parent=self,
139 profile_dir = self.profile_dir,
139 profile_dir = self.profile_dir,
140 user_module = self.user_module,
140 user_module = self.user_module,
141 user_ns = self.user_ns,
141 user_ns = self.user_ns,
142 kernel = self,
142 kernel = self,
143 )
143 )
144 self.shell.displayhook.session = self.session
144 self.shell.displayhook.session = self.session
145 self.shell.displayhook.pub_socket = self.iopub_socket
145 self.shell.displayhook.pub_socket = self.iopub_socket
146 self.shell.displayhook.topic = self._topic('execute_result')
146 self.shell.displayhook.topic = self._topic('execute_result')
147 self.shell.display_pub.session = self.session
147 self.shell.display_pub.session = self.session
148 self.shell.display_pub.pub_socket = self.iopub_socket
148 self.shell.display_pub.pub_socket = self.iopub_socket
149 self.shell.data_pub.session = self.session
149 self.shell.data_pub.session = self.session
150 self.shell.data_pub.pub_socket = self.iopub_socket
150 self.shell.data_pub.pub_socket = self.iopub_socket
151
151
152 # TMP - hack while developing
152 # TMP - hack while developing
153 self.shell._reply_content = None
153 self.shell._reply_content = None
154
154
155 # Build dict of handlers for message types
155 # Build dict of handlers for message types
156 msg_types = [ 'execute_request', 'complete_request',
156 msg_types = [ 'execute_request', 'complete_request',
157 'object_info_request', 'history_request',
157 'object_info_request', 'history_request',
158 'kernel_info_request',
158 'kernel_info_request',
159 'connect_request', 'shutdown_request',
159 'connect_request', 'shutdown_request',
160 'apply_request',
160 'apply_request',
161 ]
161 ]
162 self.shell_handlers = {}
162 self.shell_handlers = {}
163 for msg_type in msg_types:
163 for msg_type in msg_types:
164 self.shell_handlers[msg_type] = getattr(self, msg_type)
164 self.shell_handlers[msg_type] = getattr(self, msg_type)
165
165
166 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
166 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
167 comm_manager = self.shell.comm_manager
167 comm_manager = self.shell.comm_manager
168 for msg_type in comm_msg_types:
168 for msg_type in comm_msg_types:
169 self.shell_handlers[msg_type] = getattr(comm_manager, msg_type)
169 self.shell_handlers[msg_type] = getattr(comm_manager, msg_type)
170
170
171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
172 self.control_handlers = {}
172 self.control_handlers = {}
173 for msg_type in control_msg_types:
173 for msg_type in control_msg_types:
174 self.control_handlers[msg_type] = getattr(self, msg_type)
174 self.control_handlers[msg_type] = getattr(self, msg_type)
175
175
176
176
177 def dispatch_control(self, msg):
177 def dispatch_control(self, msg):
178 """dispatch control requests"""
178 """dispatch control requests"""
179 idents,msg = self.session.feed_identities(msg, copy=False)
179 idents,msg = self.session.feed_identities(msg, copy=False)
180 try:
180 try:
181 msg = self.session.unserialize(msg, content=True, copy=False)
181 msg = self.session.unserialize(msg, content=True, copy=False)
182 except:
182 except:
183 self.log.error("Invalid Control Message", exc_info=True)
183 self.log.error("Invalid Control Message", exc_info=True)
184 return
184 return
185
185
186 self.log.debug("Control received: %s", msg)
186 self.log.debug("Control received: %s", msg)
187
187
188 header = msg['header']
188 header = msg['header']
189 msg_id = header['msg_id']
189 msg_id = header['msg_id']
190 msg_type = header['msg_type']
190 msg_type = header['msg_type']
191
191
192 handler = self.control_handlers.get(msg_type, None)
192 handler = self.control_handlers.get(msg_type, None)
193 if handler is None:
193 if handler is None:
194 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
194 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
195 else:
195 else:
196 try:
196 try:
197 handler(self.control_stream, idents, msg)
197 handler(self.control_stream, idents, msg)
198 except Exception:
198 except Exception:
199 self.log.error("Exception in control handler:", exc_info=True)
199 self.log.error("Exception in control handler:", exc_info=True)
200
200
201 def dispatch_shell(self, stream, msg):
201 def dispatch_shell(self, stream, msg):
202 """dispatch shell requests"""
202 """dispatch shell requests"""
203 # flush control requests first
203 # flush control requests first
204 if self.control_stream:
204 if self.control_stream:
205 self.control_stream.flush()
205 self.control_stream.flush()
206
206
207 idents,msg = self.session.feed_identities(msg, copy=False)
207 idents,msg = self.session.feed_identities(msg, copy=False)
208 try:
208 try:
209 msg = self.session.unserialize(msg, content=True, copy=False)
209 msg = self.session.unserialize(msg, content=True, copy=False)
210 except:
210 except:
211 self.log.error("Invalid Message", exc_info=True)
211 self.log.error("Invalid Message", exc_info=True)
212 return
212 return
213
213
214 header = msg['header']
214 header = msg['header']
215 msg_id = header['msg_id']
215 msg_id = header['msg_id']
216 msg_type = msg['header']['msg_type']
216 msg_type = msg['header']['msg_type']
217
217
218 # Print some info about this message and leave a '--->' marker, so it's
218 # Print some info about this message and leave a '--->' marker, so it's
219 # easier to trace visually the message chain when debugging. Each
219 # easier to trace visually the message chain when debugging. Each
220 # handler prints its message at the end.
220 # handler prints its message at the end.
221 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
221 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
222 self.log.debug(' Content: %s\n --->\n ', msg['content'])
222 self.log.debug(' Content: %s\n --->\n ', msg['content'])
223
223
224 if msg_id in self.aborted:
224 if msg_id in self.aborted:
225 self.aborted.remove(msg_id)
225 self.aborted.remove(msg_id)
226 # is it safe to assume a msg_id will not be resubmitted?
226 # is it safe to assume a msg_id will not be resubmitted?
227 reply_type = msg_type.split('_')[0] + '_reply'
227 reply_type = msg_type.split('_')[0] + '_reply'
228 status = {'status' : 'aborted'}
228 status = {'status' : 'aborted'}
229 md = {'engine' : self.ident}
229 md = {'engine' : self.ident}
230 md.update(status)
230 md.update(status)
231 reply_msg = self.session.send(stream, reply_type, metadata=md,
231 reply_msg = self.session.send(stream, reply_type, metadata=md,
232 content=status, parent=msg, ident=idents)
232 content=status, parent=msg, ident=idents)
233 return
233 return
234
234
235 handler = self.shell_handlers.get(msg_type, None)
235 handler = self.shell_handlers.get(msg_type, None)
236 if handler is None:
236 if handler is None:
237 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
237 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
238 else:
238 else:
239 # ensure default_int_handler during handler call
239 # ensure default_int_handler during handler call
240 sig = signal(SIGINT, default_int_handler)
240 sig = signal(SIGINT, default_int_handler)
241 try:
241 try:
242 handler(stream, idents, msg)
242 handler(stream, idents, msg)
243 except Exception:
243 except Exception:
244 self.log.error("Exception in message handler:", exc_info=True)
244 self.log.error("Exception in message handler:", exc_info=True)
245 finally:
245 finally:
246 signal(SIGINT, sig)
246 signal(SIGINT, sig)
247
247
248 def enter_eventloop(self):
248 def enter_eventloop(self):
249 """enter eventloop"""
249 """enter eventloop"""
250 self.log.info("entering eventloop %s", self.eventloop)
250 self.log.info("entering eventloop %s", self.eventloop)
251 for stream in self.shell_streams:
251 for stream in self.shell_streams:
252 # flush any pending replies,
252 # flush any pending replies,
253 # which may be skipped by entering the eventloop
253 # which may be skipped by entering the eventloop
254 stream.flush(zmq.POLLOUT)
254 stream.flush(zmq.POLLOUT)
255 # restore default_int_handler
255 # restore default_int_handler
256 signal(SIGINT, default_int_handler)
256 signal(SIGINT, default_int_handler)
257 while self.eventloop is not None:
257 while self.eventloop is not None:
258 try:
258 try:
259 self.eventloop(self)
259 self.eventloop(self)
260 except KeyboardInterrupt:
260 except KeyboardInterrupt:
261 # Ctrl-C shouldn't crash the kernel
261 # Ctrl-C shouldn't crash the kernel
262 self.log.error("KeyboardInterrupt caught in kernel")
262 self.log.error("KeyboardInterrupt caught in kernel")
263 continue
263 continue
264 else:
264 else:
265 # eventloop exited cleanly, this means we should stop (right?)
265 # eventloop exited cleanly, this means we should stop (right?)
266 self.eventloop = None
266 self.eventloop = None
267 break
267 break
268 self.log.info("exiting eventloop")
268 self.log.info("exiting eventloop")
269
269
270 def start(self):
270 def start(self):
271 """register dispatchers for streams"""
271 """register dispatchers for streams"""
272 self.shell.exit_now = False
272 self.shell.exit_now = False
273 if self.control_stream:
273 if self.control_stream:
274 self.control_stream.on_recv(self.dispatch_control, copy=False)
274 self.control_stream.on_recv(self.dispatch_control, copy=False)
275
275
276 def make_dispatcher(stream):
276 def make_dispatcher(stream):
277 def dispatcher(msg):
277 def dispatcher(msg):
278 return self.dispatch_shell(stream, msg)
278 return self.dispatch_shell(stream, msg)
279 return dispatcher
279 return dispatcher
280
280
281 for s in self.shell_streams:
281 for s in self.shell_streams:
282 s.on_recv(make_dispatcher(s), copy=False)
282 s.on_recv(make_dispatcher(s), copy=False)
283
283
284 # publish starting status
284 # publish starting status
285 self._publish_status('starting')
285 self._publish_status('starting')
286
286
287 def do_one_iteration(self):
287 def do_one_iteration(self):
288 """step eventloop just once"""
288 """step eventloop just once"""
289 if self.control_stream:
289 if self.control_stream:
290 self.control_stream.flush()
290 self.control_stream.flush()
291 for stream in self.shell_streams:
291 for stream in self.shell_streams:
292 # handle at most one request per iteration
292 # handle at most one request per iteration
293 stream.flush(zmq.POLLIN, 1)
293 stream.flush(zmq.POLLIN, 1)
294 stream.flush(zmq.POLLOUT)
294 stream.flush(zmq.POLLOUT)
295
295
296
296
297 def record_ports(self, ports):
297 def record_ports(self, ports):
298 """Record the ports that this kernel is using.
298 """Record the ports that this kernel is using.
299
299
300 The creator of the Kernel instance must call this method if they
300 The creator of the Kernel instance must call this method if they
301 want the :meth:`connect_request` method to return the port numbers.
301 want the :meth:`connect_request` method to return the port numbers.
302 """
302 """
303 self._recorded_ports = ports
303 self._recorded_ports = ports
304
304
305 #---------------------------------------------------------------------------
305 #---------------------------------------------------------------------------
306 # Kernel request handlers
306 # Kernel request handlers
307 #---------------------------------------------------------------------------
307 #---------------------------------------------------------------------------
308
308
309 def _make_metadata(self, other=None):
309 def _make_metadata(self, other=None):
310 """init metadata dict, for execute/apply_reply"""
310 """init metadata dict, for execute/apply_reply"""
311 new_md = {
311 new_md = {
312 'dependencies_met' : True,
312 'dependencies_met' : True,
313 'engine' : self.ident,
313 'engine' : self.ident,
314 'started': datetime.now(),
314 'started': datetime.now(),
315 }
315 }
316 if other:
316 if other:
317 new_md.update(other)
317 new_md.update(other)
318 return new_md
318 return new_md
319
319
320 def _publish_execute_input(self, code, parent, execution_count):
320 def _publish_execute_input(self, code, parent, execution_count):
321 """Publish the code request on the iopub stream."""
321 """Publish the code request on the iopub stream."""
322
322
323 self.session.send(self.iopub_socket, u'execute_input',
323 self.session.send(self.iopub_socket, u'execute_input',
324 {u'code':code, u'execution_count': execution_count},
324 {u'code':code, u'execution_count': execution_count},
325 parent=parent, ident=self._topic('execute_input')
325 parent=parent, ident=self._topic('execute_input')
326 )
326 )
327
327
328 def _publish_status(self, status, parent=None):
328 def _publish_status(self, status, parent=None):
329 """send status (busy/idle) on IOPub"""
329 """send status (busy/idle) on IOPub"""
330 self.session.send(self.iopub_socket,
330 self.session.send(self.iopub_socket,
331 u'status',
331 u'status',
332 {u'execution_state': status},
332 {u'execution_state': status},
333 parent=parent,
333 parent=parent,
334 ident=self._topic('status'),
334 ident=self._topic('status'),
335 )
335 )
336
336
337
337
338 def execute_request(self, stream, ident, parent):
338 def execute_request(self, stream, ident, parent):
339 """handle an execute_request"""
339 """handle an execute_request"""
340
340
341 self._publish_status(u'busy', parent)
341 self._publish_status(u'busy', parent)
342
342
343 try:
343 try:
344 content = parent[u'content']
344 content = parent[u'content']
345 code = py3compat.cast_unicode_py2(content[u'code'])
345 code = py3compat.cast_unicode_py2(content[u'code'])
346 silent = content[u'silent']
346 silent = content[u'silent']
347 store_history = content.get(u'store_history', not silent)
347 store_history = content.get(u'store_history', not silent)
348 except:
348 except:
349 self.log.error("Got bad msg: ")
349 self.log.error("Got bad msg: ")
350 self.log.error("%s", parent)
350 self.log.error("%s", parent)
351 return
351 return
352
352
353 md = self._make_metadata(parent['metadata'])
353 md = self._make_metadata(parent['metadata'])
354
354
355 shell = self.shell # we'll need this a lot here
355 shell = self.shell # we'll need this a lot here
356
356
357 # Replace raw_input. Note that it is not sufficient to replace
357 # Replace raw_input. Note that it is not sufficient to replace
358 # raw_input in the user namespace.
358 # raw_input in the user namespace.
359 if content.get('allow_stdin', False):
359 if content.get('allow_stdin', False):
360 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
360 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
361 input = lambda prompt='': eval(raw_input(prompt))
361 input = lambda prompt='': eval(raw_input(prompt))
362 else:
362 else:
363 raw_input = input = lambda prompt='' : self._no_raw_input()
363 raw_input = input = lambda prompt='' : self._no_raw_input()
364
364
365 if py3compat.PY3:
365 if py3compat.PY3:
366 self._sys_raw_input = builtin_mod.input
366 self._sys_raw_input = builtin_mod.input
367 builtin_mod.input = raw_input
367 builtin_mod.input = raw_input
368 else:
368 else:
369 self._sys_raw_input = builtin_mod.raw_input
369 self._sys_raw_input = builtin_mod.raw_input
370 self._sys_eval_input = builtin_mod.input
370 self._sys_eval_input = builtin_mod.input
371 builtin_mod.raw_input = raw_input
371 builtin_mod.raw_input = raw_input
372 builtin_mod.input = input
372 builtin_mod.input = input
373
373
374 # Set the parent message of the display hook and out streams.
374 # Set the parent message of the display hook and out streams.
375 shell.set_parent(parent)
375 shell.set_parent(parent)
376
376
377 # Re-broadcast our input for the benefit of listening clients, and
377 # Re-broadcast our input for the benefit of listening clients, and
378 # start computing output
378 # start computing output
379 if not silent:
379 if not silent:
380 self._publish_execute_input(code, parent, shell.execution_count)
380 self._publish_execute_input(code, parent, shell.execution_count)
381
381
382 reply_content = {}
382 reply_content = {}
383 # FIXME: the shell calls the exception handler itself.
383 # FIXME: the shell calls the exception handler itself.
384 shell._reply_content = None
384 shell._reply_content = None
385 try:
385 try:
386 shell.run_cell(code, store_history=store_history, silent=silent)
386 shell.run_cell(code, store_history=store_history, silent=silent)
387 except:
387 except:
388 status = u'error'
388 status = u'error'
389 # FIXME: this code right now isn't being used yet by default,
389 # FIXME: this code right now isn't being used yet by default,
390 # because the run_cell() call above directly fires off exception
390 # because the run_cell() call above directly fires off exception
391 # reporting. This code, therefore, is only active in the scenario
391 # reporting. This code, therefore, is only active in the scenario
392 # where runlines itself has an unhandled exception. We need to
392 # where runlines itself has an unhandled exception. We need to
393 # uniformize this, for all exception construction to come from a
393 # uniformize this, for all exception construction to come from a
394 # single location in the codebase.
394 # single location in the codebase.
395 etype, evalue, tb = sys.exc_info()
395 etype, evalue, tb = sys.exc_info()
396 tb_list = traceback.format_exception(etype, evalue, tb)
396 tb_list = traceback.format_exception(etype, evalue, tb)
397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
398 else:
398 else:
399 status = u'ok'
399 status = u'ok'
400 finally:
400 finally:
401 # Restore raw_input.
401 # Restore raw_input.
402 if py3compat.PY3:
402 if py3compat.PY3:
403 builtin_mod.input = self._sys_raw_input
403 builtin_mod.input = self._sys_raw_input
404 else:
404 else:
405 builtin_mod.raw_input = self._sys_raw_input
405 builtin_mod.raw_input = self._sys_raw_input
406 builtin_mod.input = self._sys_eval_input
406 builtin_mod.input = self._sys_eval_input
407
407
408 reply_content[u'status'] = status
408 reply_content[u'status'] = status
409
409
410 # Return the execution counter so clients can display prompts
410 # Return the execution counter so clients can display prompts
411 reply_content['execution_count'] = shell.execution_count - 1
411 reply_content['execution_count'] = shell.execution_count - 1
412
412
413 # FIXME - fish exception info out of shell, possibly left there by
413 # FIXME - fish exception info out of shell, possibly left there by
414 # runlines. We'll need to clean up this logic later.
414 # runlines. We'll need to clean up this logic later.
415 if shell._reply_content is not None:
415 if shell._reply_content is not None:
416 reply_content.update(shell._reply_content)
416 reply_content.update(shell._reply_content)
417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
418 reply_content['engine_info'] = e_info
418 reply_content['engine_info'] = e_info
419 # reset after use
419 # reset after use
420 shell._reply_content = None
420 shell._reply_content = None
421
421
422 if 'traceback' in reply_content:
422 if 'traceback' in reply_content:
423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
424
424
425
425
426 # At this point, we can tell whether the main code execution succeeded
426 # At this point, we can tell whether the main code execution succeeded
427 # or not. If it did, we proceed to evaluate user_variables/expressions
427 # or not. If it did, we proceed to evaluate user_variables/expressions
428 if reply_content['status'] == 'ok':
428 if reply_content['status'] == 'ok':
429 reply_content[u'user_variables'] = \
429 reply_content[u'user_variables'] = \
430 shell.user_variables(content.get(u'user_variables', []))
430 shell.user_variables(content.get(u'user_variables', []))
431 reply_content[u'user_expressions'] = \
431 reply_content[u'user_expressions'] = \
432 shell.user_expressions(content.get(u'user_expressions', {}))
432 shell.user_expressions(content.get(u'user_expressions', {}))
433 else:
433 else:
434 # If there was an error, don't even try to compute variables or
434 # If there was an error, don't even try to compute variables or
435 # expressions
435 # expressions
436 reply_content[u'user_variables'] = {}
436 reply_content[u'user_variables'] = {}
437 reply_content[u'user_expressions'] = {}
437 reply_content[u'user_expressions'] = {}
438
438
439 # Payloads should be retrieved regardless of outcome, so we can both
439 # Payloads should be retrieved regardless of outcome, so we can both
440 # recover partial output (that could have been generated early in a
440 # recover partial output (that could have been generated early in a
441 # block, before an error) and clear the payload system always.
441 # block, before an error) and clear the payload system always.
442 reply_content[u'payload'] = shell.payload_manager.read_payload()
442 reply_content[u'payload'] = shell.payload_manager.read_payload()
443 # Be aggressive about clearing the payload because we don't want
443 # Be aggressive about clearing the payload because we don't want
444 # it to sit in memory until the next execute_request comes in.
444 # it to sit in memory until the next execute_request comes in.
445 shell.payload_manager.clear_payload()
445 shell.payload_manager.clear_payload()
446
446
447 # Flush output before sending the reply.
447 # Flush output before sending the reply.
448 sys.stdout.flush()
448 sys.stdout.flush()
449 sys.stderr.flush()
449 sys.stderr.flush()
450 # FIXME: on rare occasions, the flush doesn't seem to make it to the
450 # FIXME: on rare occasions, the flush doesn't seem to make it to the
451 # clients... This seems to mitigate the problem, but we definitely need
451 # clients... This seems to mitigate the problem, but we definitely need
452 # to better understand what's going on.
452 # to better understand what's going on.
453 if self._execute_sleep:
453 if self._execute_sleep:
454 time.sleep(self._execute_sleep)
454 time.sleep(self._execute_sleep)
455
455
456 # Send the reply.
456 # Send the reply.
457 reply_content = json_clean(reply_content)
457 reply_content = json_clean(reply_content)
458
458
459 md['status'] = reply_content['status']
459 md['status'] = reply_content['status']
460 if reply_content['status'] == 'error' and \
460 if reply_content['status'] == 'error' and \
461 reply_content['ename'] == 'UnmetDependency':
461 reply_content['ename'] == 'UnmetDependency':
462 md['dependencies_met'] = False
462 md['dependencies_met'] = False
463
463
464 reply_msg = self.session.send(stream, u'execute_reply',
464 reply_msg = self.session.send(stream, u'execute_reply',
465 reply_content, parent, metadata=md,
465 reply_content, parent, metadata=md,
466 ident=ident)
466 ident=ident)
467
467
468 self.log.debug("%s", reply_msg)
468 self.log.debug("%s", reply_msg)
469
469
470 if not silent and reply_msg['content']['status'] == u'error':
470 if not silent and reply_msg['content']['status'] == u'error':
471 self._abort_queues()
471 self._abort_queues()
472
472
473 self._publish_status(u'idle', parent)
473 self._publish_status(u'idle', parent)
474
474
475 def complete_request(self, stream, ident, parent):
475 def complete_request(self, stream, ident, parent):
476 txt, matches = self._complete(parent)
476 txt, matches = self._complete(parent)
477 matches = {'matches' : matches,
477 matches = {'matches' : matches,
478 'matched_text' : txt,
478 'matched_text' : txt,
479 'status' : 'ok'}
479 'status' : 'ok'}
480 matches = json_clean(matches)
480 matches = json_clean(matches)
481 completion_msg = self.session.send(stream, 'complete_reply',
481 completion_msg = self.session.send(stream, 'complete_reply',
482 matches, parent, ident)
482 matches, parent, ident)
483 self.log.debug("%s", completion_msg)
483 self.log.debug("%s", completion_msg)
484
484
485 def object_info_request(self, stream, ident, parent):
485 def object_info_request(self, stream, ident, parent):
486 content = parent['content']
486 content = parent['content']
487 object_info = self.shell.object_inspect(content['oname'],
487 object_info = self.shell.object_inspect(content['oname'],
488 detail_level = content.get('detail_level', 0)
488 detail_level = content.get('detail_level', 0)
489 )
489 )
490 # Before we send this object over, we scrub it for JSON usage
490 # Before we send this object over, we scrub it for JSON usage
491 oinfo = json_clean(object_info)
491 oinfo = json_clean(object_info)
492 msg = self.session.send(stream, 'object_info_reply',
492 msg = self.session.send(stream, 'object_info_reply',
493 oinfo, parent, ident)
493 oinfo, parent, ident)
494 self.log.debug("%s", msg)
494 self.log.debug("%s", msg)
495
495
496 def history_request(self, stream, ident, parent):
496 def history_request(self, stream, ident, parent):
497 # We need to pull these out, as passing **kwargs doesn't work with
497 # We need to pull these out, as passing **kwargs doesn't work with
498 # unicode keys before Python 2.6.5.
498 # unicode keys before Python 2.6.5.
499 hist_access_type = parent['content']['hist_access_type']
499 hist_access_type = parent['content']['hist_access_type']
500 raw = parent['content']['raw']
500 raw = parent['content']['raw']
501 output = parent['content']['output']
501 output = parent['content']['output']
502 if hist_access_type == 'tail':
502 if hist_access_type == 'tail':
503 n = parent['content']['n']
503 n = parent['content']['n']
504 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
504 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
505 include_latest=True)
505 include_latest=True)
506
506
507 elif hist_access_type == 'range':
507 elif hist_access_type == 'range':
508 session = parent['content']['session']
508 session = parent['content']['session']
509 start = parent['content']['start']
509 start = parent['content']['start']
510 stop = parent['content']['stop']
510 stop = parent['content']['stop']
511 hist = self.shell.history_manager.get_range(session, start, stop,
511 hist = self.shell.history_manager.get_range(session, start, stop,
512 raw=raw, output=output)
512 raw=raw, output=output)
513
513
514 elif hist_access_type == 'search':
514 elif hist_access_type == 'search':
515 n = parent['content'].get('n')
515 n = parent['content'].get('n')
516 unique = parent['content'].get('unique', False)
516 unique = parent['content'].get('unique', False)
517 pattern = parent['content']['pattern']
517 pattern = parent['content']['pattern']
518 hist = self.shell.history_manager.search(
518 hist = self.shell.history_manager.search(
519 pattern, raw=raw, output=output, n=n, unique=unique)
519 pattern, raw=raw, output=output, n=n, unique=unique)
520
520
521 else:
521 else:
522 hist = []
522 hist = []
523 hist = list(hist)
523 hist = list(hist)
524 content = {'history' : hist}
524 content = {'history' : hist}
525 content = json_clean(content)
525 content = json_clean(content)
526 msg = self.session.send(stream, 'history_reply',
526 msg = self.session.send(stream, 'history_reply',
527 content, parent, ident)
527 content, parent, ident)
528 self.log.debug("Sending history reply with %i entries", len(hist))
528 self.log.debug("Sending history reply with %i entries", len(hist))
529
529
530 def connect_request(self, stream, ident, parent):
530 def connect_request(self, stream, ident, parent):
531 if self._recorded_ports is not None:
531 if self._recorded_ports is not None:
532 content = self._recorded_ports.copy()
532 content = self._recorded_ports.copy()
533 else:
533 else:
534 content = {}
534 content = {}
535 msg = self.session.send(stream, 'connect_reply',
535 msg = self.session.send(stream, 'connect_reply',
536 content, parent, ident)
536 content, parent, ident)
537 self.log.debug("%s", msg)
537 self.log.debug("%s", msg)
538
538
539 def kernel_info_request(self, stream, ident, parent):
539 def kernel_info_request(self, stream, ident, parent):
540 vinfo = {
540 vinfo = {
541 'protocol_version': protocol_version,
541 'protocol_version': protocol_version,
542 'ipython_version': ipython_version,
542 'ipython_version': ipython_version,
543 'language_version': language_version,
543 'language_version': language_version,
544 'language': 'python',
544 'language': 'python',
545 }
545 }
546 msg = self.session.send(stream, 'kernel_info_reply',
546 msg = self.session.send(stream, 'kernel_info_reply',
547 vinfo, parent, ident)
547 vinfo, parent, ident)
548 self.log.debug("%s", msg)
548 self.log.debug("%s", msg)
549
549
550 def shutdown_request(self, stream, ident, parent):
550 def shutdown_request(self, stream, ident, parent):
551 self.shell.exit_now = True
551 self.shell.exit_now = True
552 content = dict(status='ok')
552 content = dict(status='ok')
553 content.update(parent['content'])
553 content.update(parent['content'])
554 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
554 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
555 # same content, but different msg_id for broadcasting on IOPub
555 # same content, but different msg_id for broadcasting on IOPub
556 self._shutdown_message = self.session.msg(u'shutdown_reply',
556 self._shutdown_message = self.session.msg(u'shutdown_reply',
557 content, parent
557 content, parent
558 )
558 )
559
559
560 self._at_shutdown()
560 self._at_shutdown()
561 # call sys.exit after a short delay
561 # call sys.exit after a short delay
562 loop = ioloop.IOLoop.instance()
562 loop = ioloop.IOLoop.instance()
563 loop.add_timeout(time.time()+0.1, loop.stop)
563 loop.add_timeout(time.time()+0.1, loop.stop)
564
564
565 #---------------------------------------------------------------------------
565 #---------------------------------------------------------------------------
566 # Engine methods
566 # Engine methods
567 #---------------------------------------------------------------------------
567 #---------------------------------------------------------------------------
568
568
569 def apply_request(self, stream, ident, parent):
569 def apply_request(self, stream, ident, parent):
570 try:
570 try:
571 content = parent[u'content']
571 content = parent[u'content']
572 bufs = parent[u'buffers']
572 bufs = parent[u'buffers']
573 msg_id = parent['header']['msg_id']
573 msg_id = parent['header']['msg_id']
574 except:
574 except:
575 self.log.error("Got bad msg: %s", parent, exc_info=True)
575 self.log.error("Got bad msg: %s", parent, exc_info=True)
576 return
576 return
577
577
578 self._publish_status(u'busy', parent)
578 self._publish_status(u'busy', parent)
579
579
580 # Set the parent message of the display hook and out streams.
580 # Set the parent message of the display hook and out streams.
581 shell = self.shell
581 shell = self.shell
582 shell.set_parent(parent)
582 shell.set_parent(parent)
583
583
584 # execute_input_msg = self.session.msg(u'execute_input',{u'code':code}, parent=parent)
584 # execute_input_msg = self.session.msg(u'execute_input',{u'code':code}, parent=parent)
585 # self.iopub_socket.send(execute_input_msg)
585 # self.iopub_socket.send(execute_input_msg)
586 # self.session.send(self.iopub_socket, u'execute_input', {u'code':code},parent=parent)
586 # self.session.send(self.iopub_socket, u'execute_input', {u'code':code},parent=parent)
587 md = self._make_metadata(parent['metadata'])
587 md = self._make_metadata(parent['metadata'])
588 try:
588 try:
589 working = shell.user_ns
589 working = shell.user_ns
590
590
591 prefix = "_"+str(msg_id).replace("-","")+"_"
591 prefix = "_"+str(msg_id).replace("-","")+"_"
592
592
593 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
593 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
594
594
595 fname = getattr(f, '__name__', 'f')
595 fname = getattr(f, '__name__', 'f')
596
596
597 fname = prefix+"f"
597 fname = prefix+"f"
598 argname = prefix+"args"
598 argname = prefix+"args"
599 kwargname = prefix+"kwargs"
599 kwargname = prefix+"kwargs"
600 resultname = prefix+"result"
600 resultname = prefix+"result"
601
601
602 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
602 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
603 # print ns
603 # print ns
604 working.update(ns)
604 working.update(ns)
605 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
605 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
606 try:
606 try:
607 exec(code, shell.user_global_ns, shell.user_ns)
607 exec(code, shell.user_global_ns, shell.user_ns)
608 result = working.get(resultname)
608 result = working.get(resultname)
609 finally:
609 finally:
610 for key in ns:
610 for key in ns:
611 working.pop(key)
611 working.pop(key)
612
612
613 result_buf = serialize_object(result,
613 result_buf = serialize_object(result,
614 buffer_threshold=self.session.buffer_threshold,
614 buffer_threshold=self.session.buffer_threshold,
615 item_threshold=self.session.item_threshold,
615 item_threshold=self.session.item_threshold,
616 )
616 )
617
617
618 except:
618 except:
619 # invoke IPython traceback formatting
619 # invoke IPython traceback formatting
620 shell.showtraceback()
620 shell.showtraceback()
621 # FIXME - fish exception info out of shell, possibly left there by
621 # FIXME - fish exception info out of shell, possibly left there by
622 # run_code. We'll need to clean up this logic later.
622 # run_code. We'll need to clean up this logic later.
623 reply_content = {}
623 reply_content = {}
624 if shell._reply_content is not None:
624 if shell._reply_content is not None:
625 reply_content.update(shell._reply_content)
625 reply_content.update(shell._reply_content)
626 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
626 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
627 reply_content['engine_info'] = e_info
627 reply_content['engine_info'] = e_info
628 # reset after use
628 # reset after use
629 shell._reply_content = None
629 shell._reply_content = None
630
630
631 self.session.send(self.iopub_socket, u'pyerr', reply_content, parent=parent,
631 self.session.send(self.iopub_socket, u'error', reply_content, parent=parent,
632 ident=self._topic('pyerr'))
632 ident=self._topic('error'))
633 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
633 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
634 result_buf = []
634 result_buf = []
635
635
636 if reply_content['ename'] == 'UnmetDependency':
636 if reply_content['ename'] == 'UnmetDependency':
637 md['dependencies_met'] = False
637 md['dependencies_met'] = False
638 else:
638 else:
639 reply_content = {'status' : 'ok'}
639 reply_content = {'status' : 'ok'}
640
640
641 # put 'ok'/'error' status in header, for scheduler introspection:
641 # put 'ok'/'error' status in header, for scheduler introspection:
642 md['status'] = reply_content['status']
642 md['status'] = reply_content['status']
643
643
644 # flush i/o
644 # flush i/o
645 sys.stdout.flush()
645 sys.stdout.flush()
646 sys.stderr.flush()
646 sys.stderr.flush()
647
647
648 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
648 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
649 parent=parent, ident=ident,buffers=result_buf, metadata=md)
649 parent=parent, ident=ident,buffers=result_buf, metadata=md)
650
650
651 self._publish_status(u'idle', parent)
651 self._publish_status(u'idle', parent)
652
652
653 #---------------------------------------------------------------------------
653 #---------------------------------------------------------------------------
654 # Control messages
654 # Control messages
655 #---------------------------------------------------------------------------
655 #---------------------------------------------------------------------------
656
656
657 def abort_request(self, stream, ident, parent):
657 def abort_request(self, stream, ident, parent):
658 """abort a specifig msg by id"""
658 """abort a specifig msg by id"""
659 msg_ids = parent['content'].get('msg_ids', None)
659 msg_ids = parent['content'].get('msg_ids', None)
660 if isinstance(msg_ids, string_types):
660 if isinstance(msg_ids, string_types):
661 msg_ids = [msg_ids]
661 msg_ids = [msg_ids]
662 if not msg_ids:
662 if not msg_ids:
663 self.abort_queues()
663 self.abort_queues()
664 for mid in msg_ids:
664 for mid in msg_ids:
665 self.aborted.add(str(mid))
665 self.aborted.add(str(mid))
666
666
667 content = dict(status='ok')
667 content = dict(status='ok')
668 reply_msg = self.session.send(stream, 'abort_reply', content=content,
668 reply_msg = self.session.send(stream, 'abort_reply', content=content,
669 parent=parent, ident=ident)
669 parent=parent, ident=ident)
670 self.log.debug("%s", reply_msg)
670 self.log.debug("%s", reply_msg)
671
671
672 def clear_request(self, stream, idents, parent):
672 def clear_request(self, stream, idents, parent):
673 """Clear our namespace."""
673 """Clear our namespace."""
674 self.shell.reset(False)
674 self.shell.reset(False)
675 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
675 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
676 content = dict(status='ok'))
676 content = dict(status='ok'))
677
677
678
678
679 #---------------------------------------------------------------------------
679 #---------------------------------------------------------------------------
680 # Protected interface
680 # Protected interface
681 #---------------------------------------------------------------------------
681 #---------------------------------------------------------------------------
682
682
683 def _wrap_exception(self, method=None):
683 def _wrap_exception(self, method=None):
684 # import here, because _wrap_exception is only used in parallel,
684 # import here, because _wrap_exception is only used in parallel,
685 # and parallel has a higher minimum pyzmq version
685 # and parallel has a higher minimum pyzmq version
686 from IPython.parallel.error import wrap_exception
686 from IPython.parallel.error import wrap_exception
687 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
687 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
688 content = wrap_exception(e_info)
688 content = wrap_exception(e_info)
689 return content
689 return content
690
690
691 def _topic(self, topic):
691 def _topic(self, topic):
692 """prefixed topic for IOPub messages"""
692 """prefixed topic for IOPub messages"""
693 if self.int_id >= 0:
693 if self.int_id >= 0:
694 base = "engine.%i" % self.int_id
694 base = "engine.%i" % self.int_id
695 else:
695 else:
696 base = "kernel.%s" % self.ident
696 base = "kernel.%s" % self.ident
697
697
698 return py3compat.cast_bytes("%s.%s" % (base, topic))
698 return py3compat.cast_bytes("%s.%s" % (base, topic))
699
699
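_topic() above builds the routing prefix that IOPub messages are published under (e.g. kernel.<ident>.status, or engine.<n>.error after this rename). Below is a hedged sketch of filtering on that prefix with a raw pyzmq SUB socket; iopub_url and ident are assumed inputs, normally read from the kernel's connection info.

    import zmq

    def subscribe_errors(iopub_url, ident):
        ctx = zmq.Context.instance()
        sub = ctx.socket(zmq.SUB)
        sub.connect(iopub_url)
        # matches the prefix produced by _topic('error') for a plain kernel
        sub.setsockopt(zmq.SUBSCRIBE, ("kernel.%s.error" % ident).encode('ascii'))
        return sub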
700 def _abort_queues(self):
700 def _abort_queues(self):
701 for stream in self.shell_streams:
701 for stream in self.shell_streams:
702 if stream:
702 if stream:
703 self._abort_queue(stream)
703 self._abort_queue(stream)
704
704
705 def _abort_queue(self, stream):
705 def _abort_queue(self, stream):
706 poller = zmq.Poller()
706 poller = zmq.Poller()
707 poller.register(stream.socket, zmq.POLLIN)
707 poller.register(stream.socket, zmq.POLLIN)
708 while True:
708 while True:
709 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
709 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
710 if msg is None:
710 if msg is None:
711 return
711 return
712
712
713 self.log.info("Aborting:")
713 self.log.info("Aborting:")
714 self.log.info("%s", msg)
714 self.log.info("%s", msg)
715 msg_type = msg['header']['msg_type']
715 msg_type = msg['header']['msg_type']
716 reply_type = msg_type.split('_')[0] + '_reply'
716 reply_type = msg_type.split('_')[0] + '_reply'
717
717
718 status = {'status' : 'aborted'}
718 status = {'status' : 'aborted'}
719 md = {'engine' : self.ident}
719 md = {'engine' : self.ident}
720 md.update(status)
720 md.update(status)
721 reply_msg = self.session.send(stream, reply_type, metadata=md,
721 reply_msg = self.session.send(stream, reply_type, metadata=md,
722 content=status, parent=msg, ident=idents)
722 content=status, parent=msg, ident=idents)
723 self.log.debug("%s", reply_msg)
723 self.log.debug("%s", reply_msg)
724 # We need to wait a bit for requests to come in. This can probably
724 # We need to wait a bit for requests to come in. This can probably
725 # be set shorter for true asynchronous clients.
725 # be set shorter for true asynchronous clients.
726 poller.poll(50)
726 poller.poll(50)
727
727
728
728
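_abort_queue drains whatever is already queued on a shell stream and replies to each request with status 'aborted'. A standalone pyzmq sketch of that drain pattern, using a raw recv_multipart in place of Session.recv (the endpoint is hypothetical):

import zmq

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.ROUTER)
sock.bind("tcp://127.0.0.1:5555")          # hypothetical endpoint

poller = zmq.Poller()
poller.register(sock, zmq.POLLIN)
while True:
    try:
        frames = sock.recv_multipart(zmq.NOBLOCK)
    except zmq.ZMQError as e:
        if e.errno == zmq.EAGAIN:
            break                          # queue is empty, nothing left to abort
        raise
    print("aborting queued request:", frames[:1])
    poller.poll(50)                        # brief wait for stragglers, as above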
729 def _no_raw_input(self):
729 def _no_raw_input(self):
730 """Raise StdinNotImplentedError if active frontend doesn't support
730 """Raise StdinNotImplentedError if active frontend doesn't support
731 stdin."""
731 stdin."""
732 raise StdinNotImplementedError("raw_input was called, but this "
732 raise StdinNotImplementedError("raw_input was called, but this "
733 "frontend does not support stdin.")
733 "frontend does not support stdin.")
734
734
735 def _raw_input(self, prompt, ident, parent):
735 def _raw_input(self, prompt, ident, parent):
736 # Flush output before making the request.
736 # Flush output before making the request.
737 sys.stderr.flush()
737 sys.stderr.flush()
738 sys.stdout.flush()
738 sys.stdout.flush()
739 # flush the stdin socket, to purge stale replies
739 # flush the stdin socket, to purge stale replies
740 while True:
740 while True:
741 try:
741 try:
742 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
742 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
743 except zmq.ZMQError as e:
743 except zmq.ZMQError as e:
744 if e.errno == zmq.EAGAIN:
744 if e.errno == zmq.EAGAIN:
745 break
745 break
746 else:
746 else:
747 raise
747 raise
748
748
749 # Send the input request.
749 # Send the input request.
750 content = json_clean(dict(prompt=prompt))
750 content = json_clean(dict(prompt=prompt))
751 self.session.send(self.stdin_socket, u'input_request', content, parent,
751 self.session.send(self.stdin_socket, u'input_request', content, parent,
752 ident=ident)
752 ident=ident)
753
753
754 # Await a response.
754 # Await a response.
755 while True:
755 while True:
756 try:
756 try:
757 ident, reply = self.session.recv(self.stdin_socket, 0)
757 ident, reply = self.session.recv(self.stdin_socket, 0)
758 except Exception:
758 except Exception:
759 self.log.warn("Invalid Message:", exc_info=True)
759 self.log.warn("Invalid Message:", exc_info=True)
760 except KeyboardInterrupt:
760 except KeyboardInterrupt:
761 # re-raise KeyboardInterrupt, to truncate traceback
761 # re-raise KeyboardInterrupt, to truncate traceback
762 raise KeyboardInterrupt
762 raise KeyboardInterrupt
763 else:
763 else:
764 break
764 break
765 try:
765 try:
766 value = py3compat.unicode_to_str(reply['content']['value'])
766 value = py3compat.unicode_to_str(reply['content']['value'])
767 except:
767 except:
768 self.log.error("Got bad raw_input reply: ")
768 self.log.error("Got bad raw_input reply: ")
769 self.log.error("%s", parent)
769 self.log.error("%s", parent)
770 value = ''
770 value = ''
771 if value == '\x04':
771 if value == '\x04':
772 # EOF
772 # EOF
773 raise EOFError
773 raise EOFError
774 return value
774 return value
775
775
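The stdin channel used by _raw_input is a simple round trip: the kernel sends an input_request and blocks until the frontend answers with an input_reply. Schematically, with illustrative values and only the fields relied on above:

input_request = {
    'msg_type': 'input_request',
    'content': {'prompt': 'Password: '},    # json_clean(dict(prompt=prompt))
}
input_reply = {
    'msg_type': 'input_reply',
    'content': {'value': 'secret'},         # a '\x04' value is treated as EOF above
}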
776 def _complete(self, msg):
776 def _complete(self, msg):
777 c = msg['content']
777 c = msg['content']
778 try:
778 try:
779 cpos = int(c['cursor_pos'])
779 cpos = int(c['cursor_pos'])
780 except:
780 except:
781 # If we don't get something we can convert to an integer, at least
781 # If we don't get something we can convert to an integer, at least
782 # attempt the completion assuming the cursor is at the end of the
782 # attempt the completion assuming the cursor is at the end of the
783 # text, if there is any, and otherwise at the end of the line
783 # text, if there is any, and otherwise at the end of the line
784 cpos = len(c['text'])
784 cpos = len(c['text'])
785 if cpos==0:
785 if cpos==0:
786 cpos = len(c['line'])
786 cpos = len(c['line'])
787 return self.shell.complete(c['text'], c['line'], cpos)
787 return self.shell.complete(c['text'], c['line'], cpos)
788
788
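A short sketch of the cursor-position fallback in _complete, fed a hypothetical request whose cursor_pos cannot be converted:

content = {'text': 'np.arr', 'line': 'x = np.arr', 'cursor_pos': None}
try:
    cpos = int(content['cursor_pos'])
except (TypeError, ValueError):
    # fall back to the end of the matched text, or of the whole line
    cpos = len(content['text']) or len(content['line'])
print(cpos)   # -> 6: complete 'np.arr' at its end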
789 def _at_shutdown(self):
789 def _at_shutdown(self):
790 """Actions taken at shutdown by the kernel, called by python's atexit.
790 """Actions taken at shutdown by the kernel, called by python's atexit.
791 """
791 """
792 # io.rprint("Kernel at_shutdown") # dbg
792 # io.rprint("Kernel at_shutdown") # dbg
793 if self._shutdown_message is not None:
793 if self._shutdown_message is not None:
794 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
794 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
795 self.log.debug("%s", self._shutdown_message)
795 self.log.debug("%s", self._shutdown_message)
796 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
796 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
797
797
@@ -1,415 +1,408 b''
1 """An Application for launching a kernel
1 """An Application for launching a kernel"""
2 """
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 #-----------------------------------------------------------------------------
7 # Imports
8 #-----------------------------------------------------------------------------
9
10 from __future__ import print_function
6 from __future__ import print_function
11
7
12 # Standard library imports
13 import atexit
8 import atexit
14 import os
9 import os
15 import sys
10 import sys
16 import signal
11 import signal
17
12
18 # System library imports
19 import zmq
13 import zmq
20 from zmq.eventloop import ioloop
14 from zmq.eventloop import ioloop
21 from zmq.eventloop.zmqstream import ZMQStream
15 from zmq.eventloop.zmqstream import ZMQStream
22
16
23 # IPython imports
24 from IPython.core.ultratb import FormattedTB
17 from IPython.core.ultratb import FormattedTB
25 from IPython.core.application import (
18 from IPython.core.application import (
26 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
27 )
20 )
28 from IPython.core.profiledir import ProfileDir
21 from IPython.core.profiledir import ProfileDir
29 from IPython.core.shellapp import (
22 from IPython.core.shellapp import (
30 InteractiveShellApp, shell_flags, shell_aliases
23 InteractiveShellApp, shell_flags, shell_aliases
31 )
24 )
32 from IPython.utils import io
25 from IPython.utils import io
33 from IPython.utils.path import filefind
26 from IPython.utils.path import filefind
34 from IPython.utils.traitlets import (
27 from IPython.utils.traitlets import (
35 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName,
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName,
36 )
29 )
37 from IPython.utils.importstring import import_item
30 from IPython.utils.importstring import import_item
38 from IPython.kernel import write_connection_file
31 from IPython.kernel import write_connection_file
39 from IPython.kernel.connect import ConnectionFileMixin
32 from IPython.kernel.connect import ConnectionFileMixin
40
33
41 # local imports
34 # local imports
42 from .heartbeat import Heartbeat
35 from .heartbeat import Heartbeat
43 from .ipkernel import Kernel
36 from .ipkernel import Kernel
44 from .parentpoller import ParentPollerUnix, ParentPollerWindows
37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
45 from .session import (
38 from .session import (
46 Session, session_flags, session_aliases, default_secure,
39 Session, session_flags, session_aliases, default_secure,
47 )
40 )
48 from .zmqshell import ZMQInteractiveShell
41 from .zmqshell import ZMQInteractiveShell
49
42
50 #-----------------------------------------------------------------------------
43 #-----------------------------------------------------------------------------
51 # Flags and Aliases
44 # Flags and Aliases
52 #-----------------------------------------------------------------------------
45 #-----------------------------------------------------------------------------
53
46
54 kernel_aliases = dict(base_aliases)
47 kernel_aliases = dict(base_aliases)
55 kernel_aliases.update({
48 kernel_aliases.update({
56 'ip' : 'IPKernelApp.ip',
49 'ip' : 'IPKernelApp.ip',
57 'hb' : 'IPKernelApp.hb_port',
50 'hb' : 'IPKernelApp.hb_port',
58 'shell' : 'IPKernelApp.shell_port',
51 'shell' : 'IPKernelApp.shell_port',
59 'iopub' : 'IPKernelApp.iopub_port',
52 'iopub' : 'IPKernelApp.iopub_port',
60 'stdin' : 'IPKernelApp.stdin_port',
53 'stdin' : 'IPKernelApp.stdin_port',
61 'control' : 'IPKernelApp.control_port',
54 'control' : 'IPKernelApp.control_port',
62 'f' : 'IPKernelApp.connection_file',
55 'f' : 'IPKernelApp.connection_file',
63 'parent': 'IPKernelApp.parent_handle',
56 'parent': 'IPKernelApp.parent_handle',
64 'transport': 'IPKernelApp.transport',
57 'transport': 'IPKernelApp.transport',
65 })
58 })
66 if sys.platform.startswith('win'):
59 if sys.platform.startswith('win'):
67 kernel_aliases['interrupt'] = 'IPKernelApp.interrupt'
60 kernel_aliases['interrupt'] = 'IPKernelApp.interrupt'
68
61
69 kernel_flags = dict(base_flags)
62 kernel_flags = dict(base_flags)
70 kernel_flags.update({
63 kernel_flags.update({
71 'no-stdout' : (
64 'no-stdout' : (
72 {'IPKernelApp' : {'no_stdout' : True}},
65 {'IPKernelApp' : {'no_stdout' : True}},
73 "redirect stdout to the null device"),
66 "redirect stdout to the null device"),
74 'no-stderr' : (
67 'no-stderr' : (
75 {'IPKernelApp' : {'no_stderr' : True}},
68 {'IPKernelApp' : {'no_stderr' : True}},
76 "redirect stderr to the null device"),
69 "redirect stderr to the null device"),
77 'pylab' : (
70 'pylab' : (
78 {'IPKernelApp' : {'pylab' : 'auto'}},
71 {'IPKernelApp' : {'pylab' : 'auto'}},
79 """Pre-load matplotlib and numpy for interactive use with
72 """Pre-load matplotlib and numpy for interactive use with
80 the default matplotlib backend."""),
73 the default matplotlib backend."""),
81 })
74 })
82
75
83 # inherit flags&aliases for any IPython shell apps
76 # inherit flags&aliases for any IPython shell apps
84 kernel_aliases.update(shell_aliases)
77 kernel_aliases.update(shell_aliases)
85 kernel_flags.update(shell_flags)
78 kernel_flags.update(shell_flags)
86
79
87 # inherit flags&aliases for Sessions
80 # inherit flags&aliases for Sessions
88 kernel_aliases.update(session_aliases)
81 kernel_aliases.update(session_aliases)
89 kernel_flags.update(session_flags)
82 kernel_flags.update(session_flags)
90
83
91 _ctrl_c_message = """\
84 _ctrl_c_message = """\
92 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
85 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
93
86
94 To exit, you will have to explicitly quit this process, by either sending
87 To exit, you will have to explicitly quit this process, by either sending
95 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
88 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
96
89
97 To read more about this, see https://github.com/ipython/ipython/issues/2049
90 To read more about this, see https://github.com/ipython/ipython/issues/2049
98
91
99 """
92 """
100
93
101 #-----------------------------------------------------------------------------
94 #-----------------------------------------------------------------------------
102 # Application class for starting an IPython Kernel
95 # Application class for starting an IPython Kernel
103 #-----------------------------------------------------------------------------
96 #-----------------------------------------------------------------------------
104
97
105 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
98 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
106 ConnectionFileMixin):
99 ConnectionFileMixin):
107 name='ipkernel'
100 name='ipkernel'
108 aliases = Dict(kernel_aliases)
101 aliases = Dict(kernel_aliases)
109 flags = Dict(kernel_flags)
102 flags = Dict(kernel_flags)
110 classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
103 classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
111 # the kernel class, as an importstring
104 # the kernel class, as an importstring
112 kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True,
105 kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True,
113 help="""The Kernel subclass to be used.
106 help="""The Kernel subclass to be used.
114
107
115 This should allow easy re-use of the IPKernelApp entry point
108 This should allow easy re-use of the IPKernelApp entry point
116 to configure and launch kernels other than IPython's own.
109 to configure and launch kernels other than IPython's own.
117 """)
110 """)
118 kernel = Any()
111 kernel = Any()
119 poller = Any() # don't restrict this even though current pollers are all Threads
112 poller = Any() # don't restrict this even though current pollers are all Threads
120 heartbeat = Instance(Heartbeat)
113 heartbeat = Instance(Heartbeat)
121 session = Instance('IPython.kernel.zmq.session.Session')
114 session = Instance('IPython.kernel.zmq.session.Session')
122 ports = Dict()
115 ports = Dict()
123
116
124 # ipkernel doesn't get its own config file
117 # ipkernel doesn't get its own config file
125 def _config_file_name_default(self):
118 def _config_file_name_default(self):
126 return 'ipython_config.py'
119 return 'ipython_config.py'
127
120
128 # inherit config file name from parent:
121 # inherit config file name from parent:
129 parent_appname = Unicode(config=True)
122 parent_appname = Unicode(config=True)
130 def _parent_appname_changed(self, name, old, new):
123 def _parent_appname_changed(self, name, old, new):
131 if self.config_file_specified:
124 if self.config_file_specified:
132 # it was manually specified, ignore
125 # it was manually specified, ignore
133 return
126 return
134 self.config_file_name = new.replace('-','_') + u'_config.py'
127 self.config_file_name = new.replace('-','_') + u'_config.py'
135 # don't let this count as specifying the config file
128 # don't let this count as specifying the config file
136 self.config_file_specified.remove(self.config_file_name)
129 self.config_file_specified.remove(self.config_file_name)
137
130
138 # connection info:
131 # connection info:
139
132
140 @property
133 @property
141 def abs_connection_file(self):
134 def abs_connection_file(self):
142 if os.path.basename(self.connection_file) == self.connection_file:
135 if os.path.basename(self.connection_file) == self.connection_file:
143 return os.path.join(self.profile_dir.security_dir, self.connection_file)
136 return os.path.join(self.profile_dir.security_dir, self.connection_file)
144 else:
137 else:
145 return self.connection_file
138 return self.connection_file
146
139
147
140
148 # streams, etc.
141 # streams, etc.
149 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
142 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
150 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
143 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
151 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
144 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
152 config=True, help="The importstring for the OutStream factory")
145 config=True, help="The importstring for the OutStream factory")
153 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
146 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
154 config=True, help="The importstring for the DisplayHook factory")
147 config=True, help="The importstring for the DisplayHook factory")
155
148
156 # polling
149 # polling
157 parent_handle = Integer(0, config=True,
150 parent_handle = Integer(0, config=True,
158 help="""kill this process if its parent dies. On Windows, the argument
151 help="""kill this process if its parent dies. On Windows, the argument
159 specifies the HANDLE of the parent process, otherwise it is simply boolean.
152 specifies the HANDLE of the parent process, otherwise it is simply boolean.
160 """)
153 """)
161 interrupt = Integer(0, config=True,
154 interrupt = Integer(0, config=True,
162 help="""ONLY USED ON WINDOWS
155 help="""ONLY USED ON WINDOWS
163 Interrupt this process when the parent is signaled.
156 Interrupt this process when the parent is signaled.
164 """)
157 """)
165
158
166 def init_crash_handler(self):
159 def init_crash_handler(self):
167 # Install minimal exception handling
160 # Install minimal exception handling
168 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
161 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
169 ostream=sys.__stdout__)
162 ostream=sys.__stdout__)
170
163
171 def init_poller(self):
164 def init_poller(self):
172 if sys.platform == 'win32':
165 if sys.platform == 'win32':
173 if self.interrupt or self.parent_handle:
166 if self.interrupt or self.parent_handle:
174 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
167 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
175 elif self.parent_handle:
168 elif self.parent_handle:
176 self.poller = ParentPollerUnix()
169 self.poller = ParentPollerUnix()
177
170
178 def _bind_socket(self, s, port):
171 def _bind_socket(self, s, port):
179 iface = '%s://%s' % (self.transport, self.ip)
172 iface = '%s://%s' % (self.transport, self.ip)
180 if self.transport == 'tcp':
173 if self.transport == 'tcp':
181 if port <= 0:
174 if port <= 0:
182 port = s.bind_to_random_port(iface)
175 port = s.bind_to_random_port(iface)
183 else:
176 else:
184 s.bind("tcp://%s:%i" % (self.ip, port))
177 s.bind("tcp://%s:%i" % (self.ip, port))
185 elif self.transport == 'ipc':
178 elif self.transport == 'ipc':
186 if port <= 0:
179 if port <= 0:
187 port = 1
180 port = 1
188 path = "%s-%i" % (self.ip, port)
181 path = "%s-%i" % (self.ip, port)
189 while os.path.exists(path):
182 while os.path.exists(path):
190 port = port + 1
183 port = port + 1
191 path = "%s-%i" % (self.ip, port)
184 path = "%s-%i" % (self.ip, port)
192 else:
185 else:
193 path = "%s-%i" % (self.ip, port)
186 path = "%s-%i" % (self.ip, port)
194 s.bind("ipc://%s" % path)
187 s.bind("ipc://%s" % path)
195 return port
188 return port
196
189
197 def write_connection_file(self):
190 def write_connection_file(self):
198 """write connection info to JSON file"""
191 """write connection info to JSON file"""
199 cf = self.abs_connection_file
192 cf = self.abs_connection_file
200 self.log.debug("Writing connection file: %s", cf)
193 self.log.debug("Writing connection file: %s", cf)
201 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
194 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
202 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
195 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
203 iopub_port=self.iopub_port, control_port=self.control_port)
196 iopub_port=self.iopub_port, control_port=self.control_port)
204
197
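The resulting kernel-<pid>.json is a flat JSON object holding exactly the keyword arguments passed above, roughly (values illustrative):

connection_info = {
    "transport": "tcp",
    "ip": "127.0.0.1",
    "shell_port": 53794,
    "iopub_port": 53795,
    "stdin_port": 53796,
    "control_port": 53797,
    "hb_port": 53798,
    "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",   # HMAC key clients use to sign messages
}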
205 def cleanup_connection_file(self):
198 def cleanup_connection_file(self):
206 cf = self.abs_connection_file
199 cf = self.abs_connection_file
207 self.log.debug("Cleaning up connection file: %s", cf)
200 self.log.debug("Cleaning up connection file: %s", cf)
208 try:
201 try:
209 os.remove(cf)
202 os.remove(cf)
210 except (IOError, OSError):
203 except (IOError, OSError):
211 pass
204 pass
212
205
213 self.cleanup_ipc_files()
206 self.cleanup_ipc_files()
214
207
215 def init_connection_file(self):
208 def init_connection_file(self):
216 if not self.connection_file:
209 if not self.connection_file:
217 self.connection_file = "kernel-%s.json"%os.getpid()
210 self.connection_file = "kernel-%s.json"%os.getpid()
218 try:
211 try:
219 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
212 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
220 except IOError:
213 except IOError:
221 self.log.debug("Connection file not found: %s", self.connection_file)
214 self.log.debug("Connection file not found: %s", self.connection_file)
222 # This means I own it, so I will clean it up:
215 # This means I own it, so I will clean it up:
223 atexit.register(self.cleanup_connection_file)
216 atexit.register(self.cleanup_connection_file)
224 return
217 return
225 try:
218 try:
226 self.load_connection_file()
219 self.load_connection_file()
227 except Exception:
220 except Exception:
228 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
221 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
229 self.exit(1)
222 self.exit(1)
230
223
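filefind searches an ordered list of directories and raises IOError when nothing matches, which is what lets init_connection_file distinguish "new file I own and will clean up" from "existing file to load". A stdlib stand-in limited to that behaviour (not the real IPython.utils.path.filefind):

import os

def find_file(name, search_dirs):
    # first match wins; IOError when the file is nowhere in the search path
    for d in search_dirs:
        candidate = os.path.join(d, name)
        if os.path.isfile(candidate):
            return os.path.abspath(candidate)
    raise IOError("%s not found in %s" % (name, search_dirs))

# find_file("kernel-1234.json", [".", "/path/to/profile_default/security"])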
231 def init_sockets(self):
224 def init_sockets(self):
232 # Create a context, a session, and the kernel sockets.
225 # Create a context, a session, and the kernel sockets.
233 self.log.info("Starting the kernel at pid: %i", os.getpid())
226 self.log.info("Starting the kernel at pid: %i", os.getpid())
234 context = zmq.Context.instance()
227 context = zmq.Context.instance()
235 # Uncomment this to try closing the context.
228 # Uncomment this to try closing the context.
236 # atexit.register(context.term)
229 # atexit.register(context.term)
237
230
238 self.shell_socket = context.socket(zmq.ROUTER)
231 self.shell_socket = context.socket(zmq.ROUTER)
239 self.shell_socket.linger = 1000
232 self.shell_socket.linger = 1000
240 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
233 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
241 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
234 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
242
235
243 self.iopub_socket = context.socket(zmq.PUB)
236 self.iopub_socket = context.socket(zmq.PUB)
244 self.iopub_socket.linger = 1000
237 self.iopub_socket.linger = 1000
245 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
238 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
246 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
239 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
247
240
248 self.stdin_socket = context.socket(zmq.ROUTER)
241 self.stdin_socket = context.socket(zmq.ROUTER)
249 self.stdin_socket.linger = 1000
242 self.stdin_socket.linger = 1000
250 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
243 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
251 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
244 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
252
245
253 self.control_socket = context.socket(zmq.ROUTER)
246 self.control_socket = context.socket(zmq.ROUTER)
254 self.control_socket.linger = 1000
247 self.control_socket.linger = 1000
255 self.control_port = self._bind_socket(self.control_socket, self.control_port)
248 self.control_port = self._bind_socket(self.control_socket, self.control_port)
256 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
249 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
257
250
258 def init_heartbeat(self):
251 def init_heartbeat(self):
259 """start the heart beating"""
252 """start the heart beating"""
260 # heartbeat doesn't share context, because it mustn't be blocked
253 # heartbeat doesn't share context, because it mustn't be blocked
261 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
254 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
262 hb_ctx = zmq.Context()
255 hb_ctx = zmq.Context()
263 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
256 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
264 self.hb_port = self.heartbeat.port
257 self.hb_port = self.heartbeat.port
265 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
258 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
266 self.heartbeat.start()
259 self.heartbeat.start()
267
260
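Functionally, the heartbeat is just an echo service: clients ping it and judge the kernel alive as long as the pings come back. A minimal stand-in showing that round trip in one process (the real Heartbeat runs in its own thread with its own Context, for the GIL reason noted above):

import zmq

ctx = zmq.Context()
rep = ctx.socket(zmq.REP)
port = rep.bind_to_random_port("tcp://127.0.0.1")

req = ctx.socket(zmq.REQ)
req.connect("tcp://127.0.0.1:%i" % port)
req.send(b"ping")
rep.send(rep.recv())     # kernel side: echo the ping straight back
print(req.recv())        # client side: b'ping' means the kernel is alive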
268 def log_connection_info(self):
261 def log_connection_info(self):
269 """display connection info, and store ports"""
262 """display connection info, and store ports"""
270 basename = os.path.basename(self.connection_file)
263 basename = os.path.basename(self.connection_file)
271 if basename == self.connection_file or \
264 if basename == self.connection_file or \
272 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
265 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
273 # use shortname
266 # use shortname
274 tail = basename
267 tail = basename
275 if self.profile != 'default':
268 if self.profile != 'default':
276 tail += " --profile %s" % self.profile
269 tail += " --profile %s" % self.profile
277 else:
270 else:
278 tail = self.connection_file
271 tail = self.connection_file
279 lines = [
272 lines = [
280 "To connect another client to this kernel, use:",
273 "To connect another client to this kernel, use:",
281 " --existing %s" % tail,
274 " --existing %s" % tail,
282 ]
275 ]
283 # log connection info
276 # log connection info
284 # info-level, so often not shown.
277 # info-level, so often not shown.
285 # frontends should use the %connect_info magic
278 # frontends should use the %connect_info magic
286 # to see the connection info
279 # to see the connection info
287 for line in lines:
280 for line in lines:
288 self.log.info(line)
281 self.log.info(line)
289 # also raw print to the terminal if no parent_handle (`ipython kernel`)
282 # also raw print to the terminal if no parent_handle (`ipython kernel`)
290 if not self.parent_handle:
283 if not self.parent_handle:
291 io.rprint(_ctrl_c_message)
284 io.rprint(_ctrl_c_message)
292 for line in lines:
285 for line in lines:
293 io.rprint(line)
286 io.rprint(line)
294
287
295 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
288 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
296 stdin=self.stdin_port, hb=self.hb_port,
289 stdin=self.stdin_port, hb=self.hb_port,
297 control=self.control_port)
290 control=self.control_port)
298
291
299 def init_session(self):
292 def init_session(self):
300 """create our session object"""
293 """create our session object"""
301 default_secure(self.config)
294 default_secure(self.config)
302 self.session = Session(parent=self, username=u'kernel')
295 self.session = Session(parent=self, username=u'kernel')
303
296
304 def init_blackhole(self):
297 def init_blackhole(self):
305 """redirects stdout/stderr to devnull if necessary"""
298 """redirects stdout/stderr to devnull if necessary"""
306 if self.no_stdout or self.no_stderr:
299 if self.no_stdout or self.no_stderr:
307 blackhole = open(os.devnull, 'w')
300 blackhole = open(os.devnull, 'w')
308 if self.no_stdout:
301 if self.no_stdout:
309 sys.stdout = sys.__stdout__ = blackhole
302 sys.stdout = sys.__stdout__ = blackhole
310 if self.no_stderr:
303 if self.no_stderr:
311 sys.stderr = sys.__stderr__ = blackhole
304 sys.stderr = sys.__stderr__ = blackhole
312
305
313 def init_io(self):
306 def init_io(self):
314 """Redirect input streams and set a display hook."""
307 """Redirect input streams and set a display hook."""
315 if self.outstream_class:
308 if self.outstream_class:
316 outstream_factory = import_item(str(self.outstream_class))
309 outstream_factory = import_item(str(self.outstream_class))
317 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
310 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
318 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
311 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
319 if self.displayhook_class:
312 if self.displayhook_class:
320 displayhook_factory = import_item(str(self.displayhook_class))
313 displayhook_factory = import_item(str(self.displayhook_class))
321 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
314 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
322
315
323 def init_signal(self):
316 def init_signal(self):
324 signal.signal(signal.SIGINT, signal.SIG_IGN)
317 signal.signal(signal.SIGINT, signal.SIG_IGN)
325
318
326 def init_kernel(self):
319 def init_kernel(self):
327 """Create the Kernel object itself"""
320 """Create the Kernel object itself"""
328 shell_stream = ZMQStream(self.shell_socket)
321 shell_stream = ZMQStream(self.shell_socket)
329 control_stream = ZMQStream(self.control_socket)
322 control_stream = ZMQStream(self.control_socket)
330
323
331 kernel_factory = import_item(str(self.kernel_class))
324 kernel_factory = import_item(str(self.kernel_class))
332
325
333 kernel = kernel_factory(parent=self, session=self.session,
326 kernel = kernel_factory(parent=self, session=self.session,
334 shell_streams=[shell_stream, control_stream],
327 shell_streams=[shell_stream, control_stream],
335 iopub_socket=self.iopub_socket,
328 iopub_socket=self.iopub_socket,
336 stdin_socket=self.stdin_socket,
329 stdin_socket=self.stdin_socket,
337 log=self.log,
330 log=self.log,
338 profile_dir=self.profile_dir,
331 profile_dir=self.profile_dir,
339 user_ns=self.user_ns,
332 user_ns=self.user_ns,
340 )
333 )
341 kernel.record_ports(self.ports)
334 kernel.record_ports(self.ports)
342 self.kernel = kernel
335 self.kernel = kernel
343
336
344 def init_gui_pylab(self):
337 def init_gui_pylab(self):
345 """Enable GUI event loop integration, taking pylab into account."""
338 """Enable GUI event loop integration, taking pylab into account."""
346
339
347 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
340 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
348 # to ensure that any exception is printed straight to stderr.
341 # to ensure that any exception is printed straight to stderr.
349 # Normally _showtraceback associates the reply with an execution,
342 # Normally _showtraceback associates the reply with an execution,
350 # which means frontends will never draw it, as this exception
343 # which means frontends will never draw it, as this exception
351 # is not associated with any execute request.
344 # is not associated with any execute request.
352
345
353 shell = self.shell
346 shell = self.shell
354 _showtraceback = shell._showtraceback
347 _showtraceback = shell._showtraceback
355 try:
348 try:
356 # replace pyerr-sending traceback with stderr
349 # replace error-sending traceback with stderr
357 def print_tb(etype, evalue, stb):
350 def print_tb(etype, evalue, stb):
358 print ("GUI event loop or pylab initialization failed",
351 print ("GUI event loop or pylab initialization failed",
359 file=io.stderr)
352 file=io.stderr)
360 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
353 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
361 shell._showtraceback = print_tb
354 shell._showtraceback = print_tb
362 InteractiveShellApp.init_gui_pylab(self)
355 InteractiveShellApp.init_gui_pylab(self)
363 finally:
356 finally:
364 shell._showtraceback = _showtraceback
357 shell._showtraceback = _showtraceback
365
358
366 def init_shell(self):
359 def init_shell(self):
367 self.shell = self.kernel.shell
360 self.shell = self.kernel.shell
368 self.shell.configurables.append(self)
361 self.shell.configurables.append(self)
369
362
370 @catch_config_error
363 @catch_config_error
371 def initialize(self, argv=None):
364 def initialize(self, argv=None):
372 super(IPKernelApp, self).initialize(argv)
365 super(IPKernelApp, self).initialize(argv)
373 self.init_blackhole()
366 self.init_blackhole()
374 self.init_connection_file()
367 self.init_connection_file()
375 self.init_session()
368 self.init_session()
376 self.init_poller()
369 self.init_poller()
377 self.init_sockets()
370 self.init_sockets()
378 self.init_heartbeat()
371 self.init_heartbeat()
379 # writing/displaying connection info must be *after* init_sockets/heartbeat
372 # writing/displaying connection info must be *after* init_sockets/heartbeat
380 self.log_connection_info()
373 self.log_connection_info()
381 self.write_connection_file()
374 self.write_connection_file()
382 self.init_io()
375 self.init_io()
383 self.init_signal()
376 self.init_signal()
384 self.init_kernel()
377 self.init_kernel()
385 # shell init steps
378 # shell init steps
386 self.init_path()
379 self.init_path()
387 self.init_shell()
380 self.init_shell()
388 self.init_gui_pylab()
381 self.init_gui_pylab()
389 self.init_extensions()
382 self.init_extensions()
390 self.init_code()
383 self.init_code()
391 # flush stdout/stderr, so that anything written to these streams during
384 # flush stdout/stderr, so that anything written to these streams during
392 # initialization does not get associated with the first execution request
385 # initialization does not get associated with the first execution request
393 sys.stdout.flush()
386 sys.stdout.flush()
394 sys.stderr.flush()
387 sys.stderr.flush()
395
388
396 def start(self):
389 def start(self):
397 if self.poller is not None:
390 if self.poller is not None:
398 self.poller.start()
391 self.poller.start()
399 self.kernel.start()
392 self.kernel.start()
400 try:
393 try:
401 ioloop.IOLoop.instance().start()
394 ioloop.IOLoop.instance().start()
402 except KeyboardInterrupt:
395 except KeyboardInterrupt:
403 pass
396 pass
404
397
405 launch_new_instance = IPKernelApp.launch_instance
398 launch_new_instance = IPKernelApp.launch_instance
406
399
407 def main():
400 def main():
408 """Run an IPKernel as an application"""
401 """Run an IPKernel as an application"""
409 app = IPKernelApp.instance()
402 app = IPKernelApp.instance()
410 app.initialize()
403 app.initialize()
411 app.start()
404 app.start()
412
405
413
406
414 if __name__ == '__main__':
407 if __name__ == '__main__':
415 main()
408 main()
@@ -1,569 +1,569 b''
1 """A ZMQ-based subclass of InteractiveShell.
1 """A ZMQ-based subclass of InteractiveShell.
2
2
3 This code is meant to ease the refactoring of the base InteractiveShell into
3 This code is meant to ease the refactoring of the base InteractiveShell into
4 something with a cleaner architecture for 2-process use, without actually
4 something with a cleaner architecture for 2-process use, without actually
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 we subclass and override what we want to fix. Once this is working well, we
6 we subclass and override what we want to fix. Once this is working well, we
7 can go back to the base class and refactor the code for a cleaner inheritance
7 can go back to the base class and refactor the code for a cleaner inheritance
8 implementation that doesn't rely on so much monkeypatching.
8 implementation that doesn't rely on so much monkeypatching.
9
9
10 But this lets us maintain a fully working IPython as we develop the new
10 But this lets us maintain a fully working IPython as we develop the new
11 machinery. This should thus be thought of as scaffolding.
11 machinery. This should thus be thought of as scaffolding.
12 """
12 """
13
13
14 # Copyright (c) IPython Development Team.
14 # Copyright (c) IPython Development Team.
15 # Distributed under the terms of the Modified BSD License.
15 # Distributed under the terms of the Modified BSD License.
16
16
17 from __future__ import print_function
17 from __future__ import print_function
18
18
19 import os
19 import os
20 import sys
20 import sys
21 import time
21 import time
22
22
23 from zmq.eventloop import ioloop
23 from zmq.eventloop import ioloop
24
24
25 from IPython.core.interactiveshell import (
25 from IPython.core.interactiveshell import (
26 InteractiveShell, InteractiveShellABC
26 InteractiveShell, InteractiveShellABC
27 )
27 )
28 from IPython.core import page
28 from IPython.core import page
29 from IPython.core.autocall import ZMQExitAutocall
29 from IPython.core.autocall import ZMQExitAutocall
30 from IPython.core.displaypub import DisplayPublisher
30 from IPython.core.displaypub import DisplayPublisher
31 from IPython.core.error import UsageError
31 from IPython.core.error import UsageError
32 from IPython.core.magics import MacroToEdit, CodeMagics
32 from IPython.core.magics import MacroToEdit, CodeMagics
33 from IPython.core.magic import magics_class, line_magic, Magics
33 from IPython.core.magic import magics_class, line_magic, Magics
34 from IPython.core.payloadpage import install_payload_page
34 from IPython.core.payloadpage import install_payload_page
35 from IPython.display import display, Javascript
35 from IPython.display import display, Javascript
36 from IPython.kernel.inprocess.socket import SocketABC
36 from IPython.kernel.inprocess.socket import SocketABC
37 from IPython.kernel import (
37 from IPython.kernel import (
38 get_connection_file, get_connection_info, connect_qtconsole
38 get_connection_file, get_connection_info, connect_qtconsole
39 )
39 )
40 from IPython.testing.skipdoctest import skip_doctest
40 from IPython.testing.skipdoctest import skip_doctest
41 from IPython.utils import openpy
41 from IPython.utils import openpy
42 from IPython.utils.jsonutil import json_clean, encode_images
42 from IPython.utils.jsonutil import json_clean, encode_images
43 from IPython.utils.process import arg_split
43 from IPython.utils.process import arg_split
44 from IPython.utils import py3compat
44 from IPython.utils import py3compat
45 from IPython.utils.py3compat import unicode_type
45 from IPython.utils.py3compat import unicode_type
46 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
46 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
47 from IPython.utils.warn import error
47 from IPython.utils.warn import error
48 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
48 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
49 from IPython.kernel.zmq.datapub import ZMQDataPublisher
49 from IPython.kernel.zmq.datapub import ZMQDataPublisher
50 from IPython.kernel.zmq.session import extract_header
50 from IPython.kernel.zmq.session import extract_header
51 from IPython.kernel.comm import CommManager
51 from IPython.kernel.comm import CommManager
52 from .session import Session
52 from .session import Session
53
53
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55 # Functions and classes
55 # Functions and classes
56 #-----------------------------------------------------------------------------
56 #-----------------------------------------------------------------------------
57
57
58 class ZMQDisplayPublisher(DisplayPublisher):
58 class ZMQDisplayPublisher(DisplayPublisher):
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
59 """A display publisher that publishes data using a ZeroMQ PUB socket."""
60
60
61 session = Instance(Session)
61 session = Instance(Session)
62 pub_socket = Instance(SocketABC)
62 pub_socket = Instance(SocketABC)
63 parent_header = Dict({})
63 parent_header = Dict({})
64 topic = CBytes(b'display_data')
64 topic = CBytes(b'display_data')
65
65
66 def set_parent(self, parent):
66 def set_parent(self, parent):
67 """Set the parent for outbound messages."""
67 """Set the parent for outbound messages."""
68 self.parent_header = extract_header(parent)
68 self.parent_header = extract_header(parent)
69
69
70 def _flush_streams(self):
70 def _flush_streams(self):
71 """flush IO Streams prior to display"""
71 """flush IO Streams prior to display"""
72 sys.stdout.flush()
72 sys.stdout.flush()
73 sys.stderr.flush()
73 sys.stderr.flush()
74
74
75 def publish(self, source, data, metadata=None):
75 def publish(self, source, data, metadata=None):
76 self._flush_streams()
76 self._flush_streams()
77 if metadata is None:
77 if metadata is None:
78 metadata = {}
78 metadata = {}
79 self._validate_data(source, data, metadata)
79 self._validate_data(source, data, metadata)
80 content = {}
80 content = {}
81 content['source'] = source
81 content['source'] = source
82 content['data'] = encode_images(data)
82 content['data'] = encode_images(data)
83 content['metadata'] = metadata
83 content['metadata'] = metadata
84 self.session.send(
84 self.session.send(
85 self.pub_socket, u'display_data', json_clean(content),
85 self.pub_socket, u'display_data', json_clean(content),
86 parent=self.parent_header, ident=self.topic,
86 parent=self.parent_header, ident=self.topic,
87 )
87 )
88
88
89 def clear_output(self, wait=False):
89 def clear_output(self, wait=False):
90 content = dict(wait=wait)
90 content = dict(wait=wait)
91 self._flush_streams()
91 self._flush_streams()
92 self.session.send(
92 self.session.send(
93 self.pub_socket, u'clear_output', content,
93 self.pub_socket, u'clear_output', content,
94 parent=self.parent_header, ident=self.topic,
94 parent=self.parent_header, ident=self.topic,
95 )
95 )
96
96
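publish() above ships rich output as a single display_data message whose data dict is keyed by MIME type, with optional per-type metadata. Schematically (values illustrative, base64 truncated):

display_data_content = {
    'source': 'display',                            # hypothetical source tag
    'data': {
        'text/plain': '<Figure size 640x480>',
        'image/png': 'iVBORw0KGgoAAAANSUhEUg...',   # base64, as encode_images produces
    },
    'metadata': {'image/png': {'width': 640, 'height': 480}},
}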
97 @magics_class
97 @magics_class
98 class KernelMagics(Magics):
98 class KernelMagics(Magics):
99 #------------------------------------------------------------------------
99 #------------------------------------------------------------------------
100 # Magic overrides
100 # Magic overrides
101 #------------------------------------------------------------------------
101 #------------------------------------------------------------------------
102 # Once the base class stops inheriting from magic, this code needs to be
102 # Once the base class stops inheriting from magic, this code needs to be
103 # moved into a separate machinery as well. For now, at least isolate here
103 # moved into a separate machinery as well. For now, at least isolate here
104 # the magics which this class needs to implement differently from the base
104 # the magics which this class needs to implement differently from the base
105 # class, or that are unique to it.
105 # class, or that are unique to it.
106
106
107 @line_magic
107 @line_magic
108 def doctest_mode(self, parameter_s=''):
108 def doctest_mode(self, parameter_s=''):
109 """Toggle doctest mode on and off.
109 """Toggle doctest mode on and off.
110
110
111 This mode is intended to make IPython behave as much as possible like a
111 This mode is intended to make IPython behave as much as possible like a
112 plain Python shell, from the perspective of how its prompts, exceptions
112 plain Python shell, from the perspective of how its prompts, exceptions
113 and output look. This makes it easy to copy and paste parts of a
113 and output look. This makes it easy to copy and paste parts of a
114 session into doctests. It does so by:
114 session into doctests. It does so by:
115
115
116 - Changing the prompts to the classic ``>>>`` ones.
116 - Changing the prompts to the classic ``>>>`` ones.
117 - Changing the exception reporting mode to 'Plain'.
117 - Changing the exception reporting mode to 'Plain'.
118 - Disabling pretty-printing of output.
118 - Disabling pretty-printing of output.
119
119
120 Note that IPython also supports the pasting of code snippets that have
120 Note that IPython also supports the pasting of code snippets that have
121 leading '>>>' and '...' prompts in them. This means that you can paste
121 leading '>>>' and '...' prompts in them. This means that you can paste
122 doctests from files or docstrings (even if they have leading
122 doctests from files or docstrings (even if they have leading
123 whitespace), and the code will execute correctly. You can then use
123 whitespace), and the code will execute correctly. You can then use
124 '%history -t' to see the translated history; this will give you the
124 '%history -t' to see the translated history; this will give you the
125 input after removal of all the leading prompts and whitespace, which
125 input after removal of all the leading prompts and whitespace, which
126 can be pasted back into an editor.
126 can be pasted back into an editor.
127
127
128 With these features, you can switch into this mode easily whenever you
128 With these features, you can switch into this mode easily whenever you
129 need to do testing and changes to doctests, without having to leave
129 need to do testing and changes to doctests, without having to leave
130 your existing IPython session.
130 your existing IPython session.
131 """
131 """
132
132
133 from IPython.utils.ipstruct import Struct
133 from IPython.utils.ipstruct import Struct
134
134
135 # Shorthands
135 # Shorthands
136 shell = self.shell
136 shell = self.shell
137 disp_formatter = self.shell.display_formatter
137 disp_formatter = self.shell.display_formatter
138 ptformatter = disp_formatter.formatters['text/plain']
138 ptformatter = disp_formatter.formatters['text/plain']
139 # dstore is a data store kept in the instance metadata bag to track any
139 # dstore is a data store kept in the instance metadata bag to track any
140 # changes we make, so we can undo them later.
140 # changes we make, so we can undo them later.
141 dstore = shell.meta.setdefault('doctest_mode', Struct())
141 dstore = shell.meta.setdefault('doctest_mode', Struct())
142 save_dstore = dstore.setdefault
142 save_dstore = dstore.setdefault
143
143
144 # save a few values we'll need to recover later
144 # save a few values we'll need to recover later
145 mode = save_dstore('mode', False)
145 mode = save_dstore('mode', False)
146 save_dstore('rc_pprint', ptformatter.pprint)
146 save_dstore('rc_pprint', ptformatter.pprint)
147 save_dstore('rc_active_types',disp_formatter.active_types)
147 save_dstore('rc_active_types',disp_formatter.active_types)
148 save_dstore('xmode', shell.InteractiveTB.mode)
148 save_dstore('xmode', shell.InteractiveTB.mode)
149
149
150 if mode == False:
150 if mode == False:
151 # turn on
151 # turn on
152 ptformatter.pprint = False
152 ptformatter.pprint = False
153 disp_formatter.active_types = ['text/plain']
153 disp_formatter.active_types = ['text/plain']
154 shell.magic('xmode Plain')
154 shell.magic('xmode Plain')
155 else:
155 else:
156 # turn off
156 # turn off
157 ptformatter.pprint = dstore.rc_pprint
157 ptformatter.pprint = dstore.rc_pprint
158 disp_formatter.active_types = dstore.rc_active_types
158 disp_formatter.active_types = dstore.rc_active_types
159 shell.magic("xmode " + dstore.xmode)
159 shell.magic("xmode " + dstore.xmode)
160
160
161 # Store new mode and inform on console
161 # Store new mode and inform on console
162 dstore.mode = bool(1-int(mode))
162 dstore.mode = bool(1-int(mode))
163 mode_label = ['OFF','ON'][dstore.mode]
163 mode_label = ['OFF','ON'][dstore.mode]
164 print('Doctest mode is:', mode_label)
164 print('Doctest mode is:', mode_label)
165
165
166 # Send the payload back so that clients can modify their prompt display
166 # Send the payload back so that clients can modify their prompt display
167 payload = dict(
167 payload = dict(
168 source='doctest_mode',
168 source='doctest_mode',
169 mode=dstore.mode)
169 mode=dstore.mode)
170 shell.payload_manager.write_payload(payload)
170 shell.payload_manager.write_payload(payload)
171
171
172
172
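The magic above leans on setdefault (via Struct) so the original settings are recorded only the first time it runs, letting the next toggle restore them. A minimal sketch of that save/restore trick with a plain dict (names hypothetical):

dstore = {}
save = dstore.setdefault

pprint_setting = True                       # stands in for ptformatter.pprint
mode = save('mode', False)                  # False the first time through
save('rc_pprint', pprint_setting)           # remembered once, never overwritten

if not mode:
    pprint_setting = False                  # turn doctest mode on
else:
    pprint_setting = dstore['rc_pprint']    # restore the saved value on the way out

dstore['mode'] = not mode
print('Doctest mode is:', ['OFF', 'ON'][dstore['mode']])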
173 _find_edit_target = CodeMagics._find_edit_target
173 _find_edit_target = CodeMagics._find_edit_target
174
174
175 @skip_doctest
175 @skip_doctest
176 @line_magic
176 @line_magic
177 def edit(self, parameter_s='', last_call=['','']):
177 def edit(self, parameter_s='', last_call=['','']):
178 """Bring up an editor and execute the resulting code.
178 """Bring up an editor and execute the resulting code.
179
179
180 Usage:
180 Usage:
181 %edit [options] [args]
181 %edit [options] [args]
182
182
183 %edit runs an external text editor. You will need to set the command for
183 %edit runs an external text editor. You will need to set the command for
184 this editor via the ``TerminalInteractiveShell.editor`` option in your
184 this editor via the ``TerminalInteractiveShell.editor`` option in your
185 configuration file before it will work.
185 configuration file before it will work.
186
186
187 This command allows you to conveniently edit multi-line code right in
187 This command allows you to conveniently edit multi-line code right in
188 your IPython session.
188 your IPython session.
189
189
190 If called without arguments, %edit opens up an empty editor with a
190 If called without arguments, %edit opens up an empty editor with a
191 temporary file and will execute the contents of this file when you
191 temporary file and will execute the contents of this file when you
192 close it (don't forget to save it!).
192 close it (don't forget to save it!).
193
193
194 Options:
194 Options:
195
195
196 -n <number>
196 -n <number>
197 Open the editor at a specified line number. By default, the IPython
197 Open the editor at a specified line number. By default, the IPython
198 editor hook uses the unix syntax 'editor +N filename', but you can
198 editor hook uses the unix syntax 'editor +N filename', but you can
199 configure this by providing your own modified hook if your favorite
199 configure this by providing your own modified hook if your favorite
200 editor supports line-number specifications with a different syntax.
200 editor supports line-number specifications with a different syntax.
201
201
202 -p
202 -p
203 Call the editor with the same data as the previous time it was used,
203 Call the editor with the same data as the previous time it was used,
204 regardless of how long ago (in your current session) it was.
204 regardless of how long ago (in your current session) it was.
205
205
206 -r
206 -r
207 Use 'raw' input. This option only applies to input taken from the
207 Use 'raw' input. This option only applies to input taken from the
208 user's history. By default, the 'processed' history is used, so that
208 user's history. By default, the 'processed' history is used, so that
209 magics are loaded in their transformed version to valid Python. If
209 magics are loaded in their transformed version to valid Python. If
210 this option is given, the raw input as typed at the command line is
210 this option is given, the raw input as typed at the command line is
211 used instead. When you exit the editor, it will be executed by
211 used instead. When you exit the editor, it will be executed by
212 IPython's own processor.
212 IPython's own processor.
213
213
214 Arguments:
214 Arguments:
215
215
216 If arguments are given, the following possibilities exist:
216 If arguments are given, the following possibilities exist:
217
217
218 - The arguments are numbers or pairs of colon-separated numbers (like
218 - The arguments are numbers or pairs of colon-separated numbers (like
219 1 4:8 9). These are interpreted as lines of previous input to be
219 1 4:8 9). These are interpreted as lines of previous input to be
220 loaded into the editor. The syntax is the same as for the %macro command.
220 loaded into the editor. The syntax is the same as for the %macro command.
221
221
222 - If the argument doesn't start with a number, it is evaluated as a
222 - If the argument doesn't start with a number, it is evaluated as a
223 variable and its contents loaded into the editor. You can thus edit
223 variable and its contents loaded into the editor. You can thus edit
224 any string which contains python code (including the result of
224 any string which contains python code (including the result of
225 previous edits).
225 previous edits).
226
226
227 - If the argument is the name of an object (other than a string),
227 - If the argument is the name of an object (other than a string),
228 IPython will try to locate the file where it was defined and open the
228 IPython will try to locate the file where it was defined and open the
229 editor at the point where it is defined. You can use ``%edit function``
229 editor at the point where it is defined. You can use ``%edit function``
230 to load an editor exactly at the point where 'function' is defined,
230 to load an editor exactly at the point where 'function' is defined,
231 edit it and have the file be executed automatically.
231 edit it and have the file be executed automatically.
232
232
233 If the object is a macro (see %macro for details), this opens up your
233 If the object is a macro (see %macro for details), this opens up your
234 specified editor with a temporary file containing the macro's data.
234 specified editor with a temporary file containing the macro's data.
235 Upon exit, the macro is reloaded with the contents of the file.
235 Upon exit, the macro is reloaded with the contents of the file.
236
236
237 Note: opening at an exact line is only supported under Unix, and some
237 Note: opening at an exact line is only supported under Unix, and some
238 editors (like kedit and gedit up to Gnome 2.8) do not understand the
238 editors (like kedit and gedit up to Gnome 2.8) do not understand the
239 '+NUMBER' parameter necessary for this feature. Good editors like
239 '+NUMBER' parameter necessary for this feature. Good editors like
240 (X)Emacs, vi, jed, pico and joe all do.
240 (X)Emacs, vi, jed, pico and joe all do.
241
241
242 - If the argument is not found as a variable, IPython will look for a
242 - If the argument is not found as a variable, IPython will look for a
243 file with that name (adding .py if necessary) and load it into the
243 file with that name (adding .py if necessary) and load it into the
244 editor. It will execute its contents with execfile() when you exit,
244 editor. It will execute its contents with execfile() when you exit,
245 loading any code in the file into your interactive namespace.
245 loading any code in the file into your interactive namespace.
246
246
247 Unlike in the terminal, this is designed to use a GUI editor, and we do
247 Unlike in the terminal, this is designed to use a GUI editor, and we do
248 not know when it has closed. So the file you edit will not be
248 not know when it has closed. So the file you edit will not be
249 automatically executed or printed.
249 automatically executed or printed.
250
250
251 Note that %edit is also available through the alias %ed.
251 Note that %edit is also available through the alias %ed.
252 """
252 """
253
253
254 opts,args = self.parse_options(parameter_s,'prn:')
254 opts,args = self.parse_options(parameter_s,'prn:')
255
255
256 try:
256 try:
257 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
257 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
258 except MacroToEdit as e:
258 except MacroToEdit as e:
259 # TODO: Implement macro editing over 2 processes.
259 # TODO: Implement macro editing over 2 processes.
260 print("Macro editing not yet implemented in 2-process model.")
260 print("Macro editing not yet implemented in 2-process model.")
261 return
261 return
262
262
263 # Make sure we send to the client an absolute path, in case the working
263 # Make sure we send to the client an absolute path, in case the working
264 # directory of client and kernel don't match
264 # directory of client and kernel don't match
265 filename = os.path.abspath(filename)
265 filename = os.path.abspath(filename)
266
266
267 payload = {
267 payload = {
268 'source' : 'edit_magic',
268 'source' : 'edit_magic',
269 'filename' : filename,
269 'filename' : filename,
270 'line_number' : lineno
270 'line_number' : lineno
271 }
271 }
272 self.shell.payload_manager.write_payload(payload)
272 self.shell.payload_manager.write_payload(payload)
273
273
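In the two-process model the kernel cannot know when an editor closes, so %edit only hands the frontend a payload naming the target; the frontend decides how to open it and nothing is executed automatically. Schematically (path and line number illustrative):

edit_payload = {
    'source': 'edit_magic',
    'filename': '/home/user/project/script.py',   # always an absolute path, see above
    'line_number': 42,
}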
274 # A few magics that are adapted to the specifics of using pexpect and a
274 # A few magics that are adapted to the specifics of using pexpect and a
275 # remote terminal
275 # remote terminal
276
276
277 @line_magic
277 @line_magic
278 def clear(self, arg_s):
278 def clear(self, arg_s):
279 """Clear the terminal."""
279 """Clear the terminal."""
280 if os.name == 'posix':
280 if os.name == 'posix':
281 self.shell.system("clear")
281 self.shell.system("clear")
282 else:
282 else:
283 self.shell.system("cls")
283 self.shell.system("cls")
284
284
285 if os.name == 'nt':
285 if os.name == 'nt':
286 # This is the usual name in windows
286 # This is the usual name in windows
287 cls = line_magic('cls')(clear)
287 cls = line_magic('cls')(clear)
288
288
289 # Terminal pagers won't work over pexpect, but we do have our own pager
289 # Terminal pagers won't work over pexpect, but we do have our own pager
290
290
291 @line_magic
291 @line_magic
292 def less(self, arg_s):
292 def less(self, arg_s):
293 """Show a file through the pager.
293 """Show a file through the pager.
294
294
295 Files ending in .py are syntax-highlighted."""
295 Files ending in .py are syntax-highlighted."""
296 if not arg_s:
296 if not arg_s:
297 raise UsageError('Missing filename.')
297 raise UsageError('Missing filename.')
298
298
299 cont = open(arg_s).read()
299 cont = open(arg_s).read()
300 if arg_s.endswith('.py'):
300 if arg_s.endswith('.py'):
301 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
301 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
302 else:
302 else:
303 cont = open(arg_s).read()
303 cont = open(arg_s).read()
304 page.page(cont)
304 page.page(cont)
305
305
306 more = line_magic('more')(less)
306 more = line_magic('more')(less)
307
307
308 # Man calls a pager, so we also need to redefine it
308 # Man calls a pager, so we also need to redefine it
309 if os.name == 'posix':
309 if os.name == 'posix':
310 @line_magic
310 @line_magic
311 def man(self, arg_s):
311 def man(self, arg_s):
312 """Find the man page for the given command and display in pager."""
312 """Find the man page for the given command and display in pager."""
313 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
313 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
314 split=False))
314 split=False))
315
315
316 @line_magic
316 @line_magic
317 def connect_info(self, arg_s):
317 def connect_info(self, arg_s):
318 """Print information for connecting other clients to this kernel
318 """Print information for connecting other clients to this kernel
319
319
320 It will print the contents of this session's connection file, as well as
320 It will print the contents of this session's connection file, as well as
321 shortcuts for local clients.
321 shortcuts for local clients.
322
322
323 In the simplest case, when called from the most recently launched kernel,
323 In the simplest case, when called from the most recently launched kernel,
324 secondary clients can be connected simply with:
324 secondary clients can be connected simply with:
325
325
326 $> ipython <app> --existing
326 $> ipython <app> --existing
327
327
328 """
328 """
329
329
330 from IPython.core.application import BaseIPythonApplication as BaseIPApp
330 from IPython.core.application import BaseIPythonApplication as BaseIPApp
331
331
332 if BaseIPApp.initialized():
332 if BaseIPApp.initialized():
333 app = BaseIPApp.instance()
333 app = BaseIPApp.instance()
334 security_dir = app.profile_dir.security_dir
334 security_dir = app.profile_dir.security_dir
335 profile = app.profile
335 profile = app.profile
336 else:
336 else:
337 profile = 'default'
337 profile = 'default'
338 security_dir = ''
338 security_dir = ''
339
339
340 try:
340 try:
341 connection_file = get_connection_file()
341 connection_file = get_connection_file()
342 info = get_connection_info(unpack=False)
342 info = get_connection_info(unpack=False)
343 except Exception as e:
343 except Exception as e:
344 error("Could not get connection info: %r" % e)
344 error("Could not get connection info: %r" % e)
345 return
345 return
346
346
347 # add profile flag for non-default profile
347 # add profile flag for non-default profile
348 profile_flag = "--profile %s" % profile if profile != 'default' else ""
348 profile_flag = "--profile %s" % profile if profile != 'default' else ""
349
349
350 # if it's in the security dir, truncate to basename
350 # if it's in the security dir, truncate to basename
351 if security_dir == os.path.dirname(connection_file):
351 if security_dir == os.path.dirname(connection_file):
352 connection_file = os.path.basename(connection_file)
352 connection_file = os.path.basename(connection_file)
353
353
354
354
355 print (info + '\n')
355 print (info + '\n')
356 print ("Paste the above JSON into a file, and connect with:\n"
356 print ("Paste the above JSON into a file, and connect with:\n"
357 " $> ipython <app> --existing <file>\n"
357 " $> ipython <app> --existing <file>\n"
358 "or, if you are local, you can connect with just:\n"
358 "or, if you are local, you can connect with just:\n"
359 " $> ipython <app> --existing {0} {1}\n"
359 " $> ipython <app> --existing {0} {1}\n"
360 "or even just:\n"
360 "or even just:\n"
361 " $> ipython <app> --existing {1}\n"
361 " $> ipython <app> --existing {1}\n"
362 "if this is the most recent IPython session you have started.".format(
362 "if this is the most recent IPython session you have started.".format(
363 connection_file, profile_flag
363 connection_file, profile_flag
364 )
364 )
365 )
365 )
366
366
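To make the workflow concrete, a typical use is to run %connect_info in one kernel and then pass the reported connection file to a second client (the file name below is illustrative only):

    $> ipython qtconsole --existing kernel-1234.json
    $> ipython console --existing

The bare-filename and no-argument forms rely on the connection file living in the profile's security directory, which is why the code above truncates the path to its basename in that case.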
367 @line_magic
367 @line_magic
368 def qtconsole(self, arg_s):
368 def qtconsole(self, arg_s):
369 """Open a qtconsole connected to this kernel.
369 """Open a qtconsole connected to this kernel.
370
370
371 Useful for connecting a qtconsole to running notebooks, for better
371 Useful for connecting a qtconsole to running notebooks, for better
372 debugging.
372 debugging.
373 """
373 """
374
374
375 # %qtconsole should imply bind_kernel for engines:
375 # %qtconsole should imply bind_kernel for engines:
376 try:
376 try:
377 from IPython.parallel import bind_kernel
377 from IPython.parallel import bind_kernel
378 except ImportError:
378 except ImportError:
379 # technically possible, because parallel has higher pyzmq min-version
379 # technically possible, because parallel has higher pyzmq min-version
380 pass
380 pass
381 else:
381 else:
382 bind_kernel()
382 bind_kernel()
383
383
384 try:
384 try:
385 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
385 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
386 except Exception as e:
386 except Exception as e:
387 error("Could not start qtconsole: %r" % e)
387 error("Could not start qtconsole: %r" % e)
388 return
388 return
389
389
390 @line_magic
390 @line_magic
391 def autosave(self, arg_s):
391 def autosave(self, arg_s):
392 """Set the autosave interval in the notebook (in seconds).
392 """Set the autosave interval in the notebook (in seconds).
393
393
394 The default value is 120, or two minutes.
394 The default value is 120, or two minutes.
395 ``%autosave 0`` will disable autosave.
395 ``%autosave 0`` will disable autosave.
396
396
397 This magic only has an effect when called from the notebook interface.
397 This magic only has an effect when called from the notebook interface.
398 It has no effect when called in a startup file.
398 It has no effect when called in a startup file.
399 """
399 """
400
400
401 try:
401 try:
402 interval = int(arg_s)
402 interval = int(arg_s)
403 except ValueError:
403 except ValueError:
404 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
404 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
405
405
406 # javascript wants milliseconds
406 # javascript wants milliseconds
407 milliseconds = 1000 * interval
407 milliseconds = 1000 * interval
408 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
408 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
409 include=['application/javascript']
409 include=['application/javascript']
410 )
410 )
411 if interval:
411 if interval:
412 print("Autosaving every %i seconds" % interval)
412 print("Autosaving every %i seconds" % interval)
413 else:
413 else:
414 print("Autosave disabled")
414 print("Autosave disabled")
415
415
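As a quick illustration of the unit handling (seconds on the magic line, milliseconds in the emitted JavaScript), a hypothetical notebook session would behave like:

    In [1]: %autosave 300
    Autosaving every 300 seconds

    In [2]: %autosave 0
    Autosave disabled

Behind the scenes, the first call displays Javascript("IPython.notebook.set_autosave_interval(300000)"), since the notebook-side API expects milliseconds.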
416
416
417 class ZMQInteractiveShell(InteractiveShell):
417 class ZMQInteractiveShell(InteractiveShell):
418 """A subclass of InteractiveShell for ZMQ."""
418 """A subclass of InteractiveShell for ZMQ."""
419
419
420 displayhook_class = Type(ZMQShellDisplayHook)
420 displayhook_class = Type(ZMQShellDisplayHook)
421 display_pub_class = Type(ZMQDisplayPublisher)
421 display_pub_class = Type(ZMQDisplayPublisher)
422 data_pub_class = Type(ZMQDataPublisher)
422 data_pub_class = Type(ZMQDataPublisher)
423 kernel = Any()
423 kernel = Any()
424 parent_header = Any()
424 parent_header = Any()
425
425
426 # Override the traitlet in the parent class, because there's no point using
426 # Override the traitlet in the parent class, because there's no point using
427 # readline for the kernel. Can be removed when the readline code is moved
427 # readline for the kernel. Can be removed when the readline code is moved
428 # to the terminal frontend.
428 # to the terminal frontend.
429 colors_force = CBool(True)
429 colors_force = CBool(True)
430 readline_use = CBool(False)
430 readline_use = CBool(False)
431 # autoindent has no meaning in a zmqshell, and attempting to enable it
431 # autoindent has no meaning in a zmqshell, and attempting to enable it
432 # will print a warning in the absence of readline.
432 # will print a warning in the absence of readline.
433 autoindent = CBool(False)
433 autoindent = CBool(False)
434
434
435 exiter = Instance(ZMQExitAutocall)
435 exiter = Instance(ZMQExitAutocall)
436 def _exiter_default(self):
436 def _exiter_default(self):
437 return ZMQExitAutocall(self)
437 return ZMQExitAutocall(self)
438
438
439 def _exit_now_changed(self, name, old, new):
439 def _exit_now_changed(self, name, old, new):
440 """stop eventloop when exit_now fires"""
440 """stop eventloop when exit_now fires"""
441 if new:
441 if new:
442 loop = ioloop.IOLoop.instance()
442 loop = ioloop.IOLoop.instance()
443 loop.add_timeout(time.time()+0.1, loop.stop)
443 loop.add_timeout(time.time()+0.1, loop.stop)
444
444
445 keepkernel_on_exit = None
445 keepkernel_on_exit = None
446
446
447 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
447 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
448 # interactive input being read; we provide event loop support in ipkernel
448 # interactive input being read; we provide event loop support in ipkernel
449 @staticmethod
449 @staticmethod
450 def enable_gui(gui):
450 def enable_gui(gui):
451 from .eventloops import enable_gui as real_enable_gui
451 from .eventloops import enable_gui as real_enable_gui
452 try:
452 try:
453 real_enable_gui(gui)
453 real_enable_gui(gui)
454 except ValueError as e:
454 except ValueError as e:
455 raise UsageError("%s" % e)
455 raise UsageError("%s" % e)
456
456
457 def init_environment(self):
457 def init_environment(self):
458 """Configure the user's environment.
458 """Configure the user's environment.
459
459
460 """
460 """
461 env = os.environ
461 env = os.environ
462 # These two ensure 'ls' produces nice coloring on BSD-derived systems
462 # These two ensure 'ls' produces nice coloring on BSD-derived systems
463 env['TERM'] = 'xterm-color'
463 env['TERM'] = 'xterm-color'
464 env['CLICOLOR'] = '1'
464 env['CLICOLOR'] = '1'
465 # Since normal pagers don't work at all (over pexpect we don't have
465 # Since normal pagers don't work at all (over pexpect we don't have
466 # single-key control of the subprocess), try to disable paging in
466 # single-key control of the subprocess), try to disable paging in
467 # subprocesses as much as possible.
467 # subprocesses as much as possible.
468 env['PAGER'] = 'cat'
468 env['PAGER'] = 'cat'
469 env['GIT_PAGER'] = 'cat'
469 env['GIT_PAGER'] = 'cat'
470
470
471 # And install the payload version of page.
471 # And install the payload version of page.
472 install_payload_page()
472 install_payload_page()
473
473
474 def auto_rewrite_input(self, cmd):
474 def auto_rewrite_input(self, cmd):
475 """Called to show the auto-rewritten input for autocall and friends.
475 """Called to show the auto-rewritten input for autocall and friends.
476
476
477 FIXME: this payload is currently not correctly processed by the
477 FIXME: this payload is currently not correctly processed by the
478 frontend.
478 frontend.
479 """
479 """
480 new = self.prompt_manager.render('rewrite') + cmd
480 new = self.prompt_manager.render('rewrite') + cmd
481 payload = dict(
481 payload = dict(
482 source='auto_rewrite_input',
482 source='auto_rewrite_input',
483 transformed_input=new,
483 transformed_input=new,
484 )
484 )
485 self.payload_manager.write_payload(payload)
485 self.payload_manager.write_payload(payload)
486
486
487 def ask_exit(self):
487 def ask_exit(self):
488 """Engage the exit actions."""
488 """Engage the exit actions."""
489 self.exit_now = True
489 self.exit_now = True
490 payload = dict(
490 payload = dict(
491 source='ask_exit',
491 source='ask_exit',
492 exit=True,
492 exit=True,
493 keepkernel=self.keepkernel_on_exit,
493 keepkernel=self.keepkernel_on_exit,
494 )
494 )
495 self.payload_manager.write_payload(payload)
495 self.payload_manager.write_payload(payload)
496
496
497 def _showtraceback(self, etype, evalue, stb):
497 def _showtraceback(self, etype, evalue, stb):
498 # try to preserve ordering of tracebacks and print statements
498 # try to preserve ordering of tracebacks and print statements
499 sys.stdout.flush()
499 sys.stdout.flush()
500 sys.stderr.flush()
500 sys.stderr.flush()
501
501
502 exc_content = {
502 exc_content = {
503 u'traceback' : stb,
503 u'traceback' : stb,
504 u'ename' : unicode_type(etype.__name__),
504 u'ename' : unicode_type(etype.__name__),
505 u'evalue' : py3compat.safe_unicode(evalue),
505 u'evalue' : py3compat.safe_unicode(evalue),
506 }
506 }
507
507
508 dh = self.displayhook
508 dh = self.displayhook
509 # Send exception info over pub socket for other clients than the caller
509 # Send exception info over pub socket for other clients than the caller
510 # to pick up
510 # to pick up
511 topic = None
511 topic = None
512 if dh.topic:
512 if dh.topic:
513 topic = dh.topic.replace(b'execute_result', b'pyerr')
513 topic = dh.topic.replace(b'execute_result', b'error')
514
514
515 exc_msg = dh.session.send(dh.pub_socket, u'pyerr', json_clean(exc_content), dh.parent_header, ident=topic)
515 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
516
516
517 # FIXME - Hack: store exception info in shell object. Right now, the
517 # FIXME - Hack: store exception info in shell object. Right now, the
518 # caller is reading this info after the fact, we need to fix this logic
518 # caller is reading this info after the fact, we need to fix this logic
519 # to remove this hack. Even uglier, we need to store the error status
519 # to remove this hack. Even uglier, we need to store the error status
520 # here, because in the main loop, the logic that sets it is being
520 # here, because in the main loop, the logic that sets it is being
521 # skipped because runlines swallows the exceptions.
521 # skipped because runlines swallows the exceptions.
522 exc_content[u'status'] = u'error'
522 exc_content[u'status'] = u'error'
523 self._reply_content = exc_content
523 self._reply_content = exc_content
524 # /FIXME
524 # /FIXME
525
525
526 return exc_content
526 return exc_content
527
527
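For readers tracking the pyerr -> error rename: the content dict sent on the IOPub socket keeps the same shape, only the message type changes. A small self-contained sketch of building that content for a caught exception (plain str() stands in for the py3compat helpers used above):

    # Hedged sketch: shape of the 'error' (formerly 'pyerr') message content.
    import sys
    import traceback

    try:
        1 / 0
    except Exception:
        etype, evalue, tb = sys.exc_info()
        exc_content = {
            'traceback': traceback.format_exception(etype, evalue, tb),
            'ename': etype.__name__,
            'evalue': str(evalue),
        }
        # e.g. "ZeroDivisionError: division by zero"
        print("%s: %s" % (exc_content['ename'], exc_content['evalue']))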
528 def set_next_input(self, text):
528 def set_next_input(self, text):
529 """Send the specified text to the frontend to be presented at the next
529 """Send the specified text to the frontend to be presented at the next
530 input cell."""
530 input cell."""
531 payload = dict(
531 payload = dict(
532 source='set_next_input',
532 source='set_next_input',
533 text=text
533 text=text
534 )
534 )
535 self.payload_manager.write_payload(payload)
535 self.payload_manager.write_payload(payload)
536
536
537 def set_parent(self, parent):
537 def set_parent(self, parent):
538 """Set the parent header for associating output with its triggering input"""
538 """Set the parent header for associating output with its triggering input"""
539 self.parent_header = parent
539 self.parent_header = parent
540 self.displayhook.set_parent(parent)
540 self.displayhook.set_parent(parent)
541 self.display_pub.set_parent(parent)
541 self.display_pub.set_parent(parent)
542 self.data_pub.set_parent(parent)
542 self.data_pub.set_parent(parent)
543 try:
543 try:
544 sys.stdout.set_parent(parent)
544 sys.stdout.set_parent(parent)
545 except AttributeError:
545 except AttributeError:
546 pass
546 pass
547 try:
547 try:
548 sys.stderr.set_parent(parent)
548 sys.stderr.set_parent(parent)
549 except AttributeError:
549 except AttributeError:
550 pass
550 pass
551
551
552 def get_parent(self):
552 def get_parent(self):
553 return self.parent_header
553 return self.parent_header
554
554
555 #-------------------------------------------------------------------------
555 #-------------------------------------------------------------------------
556 # Things related to magics
556 # Things related to magics
557 #-------------------------------------------------------------------------
557 #-------------------------------------------------------------------------
558
558
559 def init_magics(self):
559 def init_magics(self):
560 super(ZMQInteractiveShell, self).init_magics()
560 super(ZMQInteractiveShell, self).init_magics()
561 self.register_magics(KernelMagics)
561 self.register_magics(KernelMagics)
562 self.magics_manager.register_alias('ed', 'edit')
562 self.magics_manager.register_alias('ed', 'edit')
563
563
564 def init_comms(self):
564 def init_comms(self):
565 self.comm_manager = CommManager(shell=self, parent=self)
565 self.comm_manager = CommManager(shell=self, parent=self)
566 self.configurables.append(self.comm_manager)
566 self.configurables.append(self.comm_manager)
567
567
568
568
569 InteractiveShellABC.register(ZMQInteractiveShell)
569 InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,1863 +1,1863 b''
1 """A semi-synchronous Client for IPython parallel"""
1 """A semi-synchronous Client for IPython parallel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import os
8 import os
9 import json
9 import json
10 import sys
10 import sys
11 from threading import Thread, Event
11 from threading import Thread, Event
12 import time
12 import time
13 import warnings
13 import warnings
14 from datetime import datetime
14 from datetime import datetime
15 from getpass import getpass
15 from getpass import getpass
16 from pprint import pprint
16 from pprint import pprint
17
17
18 pjoin = os.path.join
18 pjoin = os.path.join
19
19
20 import zmq
20 import zmq
21
21
22 from IPython.config.configurable import MultipleInstanceError
22 from IPython.config.configurable import MultipleInstanceError
23 from IPython.core.application import BaseIPythonApplication
23 from IPython.core.application import BaseIPythonApplication
24 from IPython.core.profiledir import ProfileDir, ProfileDirError
24 from IPython.core.profiledir import ProfileDir, ProfileDirError
25
25
26 from IPython.utils.capture import RichOutput
26 from IPython.utils.capture import RichOutput
27 from IPython.utils.coloransi import TermColors
27 from IPython.utils.coloransi import TermColors
28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
29 from IPython.utils.localinterfaces import localhost, is_local_ip
29 from IPython.utils.localinterfaces import localhost, is_local_ip
30 from IPython.utils.path import get_ipython_dir
30 from IPython.utils.path import get_ipython_dir
31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
33 Dict, List, Bool, Set, Any)
33 Dict, List, Bool, Set, Any)
34 from IPython.external.decorator import decorator
34 from IPython.external.decorator import decorator
35 from IPython.external.ssh import tunnel
35 from IPython.external.ssh import tunnel
36
36
37 from IPython.parallel import Reference
37 from IPython.parallel import Reference
38 from IPython.parallel import error
38 from IPython.parallel import error
39 from IPython.parallel import util
39 from IPython.parallel import util
40
40
41 from IPython.kernel.zmq.session import Session, Message
41 from IPython.kernel.zmq.session import Session, Message
42 from IPython.kernel.zmq import serialize
42 from IPython.kernel.zmq import serialize
43
43
44 from .asyncresult import AsyncResult, AsyncHubResult
44 from .asyncresult import AsyncResult, AsyncHubResult
45 from .view import DirectView, LoadBalancedView
45 from .view import DirectView, LoadBalancedView
46
46
47 #--------------------------------------------------------------------------
47 #--------------------------------------------------------------------------
48 # Decorators for Client methods
48 # Decorators for Client methods
49 #--------------------------------------------------------------------------
49 #--------------------------------------------------------------------------
50
50
51 @decorator
51 @decorator
52 def spin_first(f, self, *args, **kwargs):
52 def spin_first(f, self, *args, **kwargs):
53 """Call spin() to sync state prior to calling the method."""
53 """Call spin() to sync state prior to calling the method."""
54 self.spin()
54 self.spin()
55 return f(self, *args, **kwargs)
55 return f(self, *args, **kwargs)
56
56
57
57
58 #--------------------------------------------------------------------------
58 #--------------------------------------------------------------------------
59 # Classes
59 # Classes
60 #--------------------------------------------------------------------------
60 #--------------------------------------------------------------------------
61
61
62
62
63 class ExecuteReply(RichOutput):
63 class ExecuteReply(RichOutput):
64 """wrapper for finished Execute results"""
64 """wrapper for finished Execute results"""
65 def __init__(self, msg_id, content, metadata):
65 def __init__(self, msg_id, content, metadata):
66 self.msg_id = msg_id
66 self.msg_id = msg_id
67 self._content = content
67 self._content = content
68 self.execution_count = content['execution_count']
68 self.execution_count = content['execution_count']
69 self.metadata = metadata
69 self.metadata = metadata
70
70
71 # RichOutput overrides
71 # RichOutput overrides
72
72
73 @property
73 @property
74 def source(self):
74 def source(self):
75 execute_result = self.metadata['execute_result']
75 execute_result = self.metadata['execute_result']
76 if execute_result:
76 if execute_result:
77 return execute_result.get('source', '')
77 return execute_result.get('source', '')
78
78
79 @property
79 @property
80 def data(self):
80 def data(self):
81 execute_result = self.metadata['execute_result']
81 execute_result = self.metadata['execute_result']
82 if execute_result:
82 if execute_result:
83 return execute_result.get('data', {})
83 return execute_result.get('data', {})
84
84
85 @property
85 @property
86 def _metadata(self):
86 def _metadata(self):
87 execute_result = self.metadata['execute_result']
87 execute_result = self.metadata['execute_result']
88 if execute_result:
88 if execute_result:
89 return execute_result.get('metadata', {})
89 return execute_result.get('metadata', {})
90
90
91 def display(self):
91 def display(self):
92 from IPython.display import publish_display_data
92 from IPython.display import publish_display_data
93 publish_display_data(self.source, self.data, self.metadata)
93 publish_display_data(self.source, self.data, self.metadata)
94
94
95 def _repr_mime_(self, mime):
95 def _repr_mime_(self, mime):
96 if mime not in self.data:
96 if mime not in self.data:
97 return
97 return
98 data = self.data[mime]
98 data = self.data[mime]
99 if mime in self._metadata:
99 if mime in self._metadata:
100 return data, self._metadata[mime]
100 return data, self._metadata[mime]
101 else:
101 else:
102 return data
102 return data
103
103
104 def __getitem__(self, key):
104 def __getitem__(self, key):
105 return self.metadata[key]
105 return self.metadata[key]
106
106
107 def __getattr__(self, key):
107 def __getattr__(self, key):
108 if key not in self.metadata:
108 if key not in self.metadata:
109 raise AttributeError(key)
109 raise AttributeError(key)
110 return self.metadata[key]
110 return self.metadata[key]
111
111
112 def __repr__(self):
112 def __repr__(self):
113 execute_result = self.metadata['execute_result'] or {'data':{}}
113 execute_result = self.metadata['execute_result'] or {'data':{}}
114 text_out = execute_result['data'].get('text/plain', '')
114 text_out = execute_result['data'].get('text/plain', '')
115 if len(text_out) > 32:
115 if len(text_out) > 32:
116 text_out = text_out[:29] + '...'
116 text_out = text_out[:29] + '...'
117
117
118 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
118 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
119
119
120 def _repr_pretty_(self, p, cycle):
120 def _repr_pretty_(self, p, cycle):
121 execute_result = self.metadata['execute_result'] or {'data':{}}
121 execute_result = self.metadata['execute_result'] or {'data':{}}
122 text_out = execute_result['data'].get('text/plain', '')
122 text_out = execute_result['data'].get('text/plain', '')
123
123
124 if not text_out:
124 if not text_out:
125 return
125 return
126
126
127 try:
127 try:
128 ip = get_ipython()
128 ip = get_ipython()
129 except NameError:
129 except NameError:
130 colors = "NoColor"
130 colors = "NoColor"
131 else:
131 else:
132 colors = ip.colors
132 colors = ip.colors
133
133
134 if colors == "NoColor":
134 if colors == "NoColor":
135 out = normal = ""
135 out = normal = ""
136 else:
136 else:
137 out = TermColors.Red
137 out = TermColors.Red
138 normal = TermColors.Normal
138 normal = TermColors.Normal
139
139
140 if '\n' in text_out and not text_out.startswith('\n'):
140 if '\n' in text_out and not text_out.startswith('\n'):
141 # add newline for multiline reprs
141 # add newline for multiline reprs
142 text_out = '\n' + text_out
142 text_out = '\n' + text_out
143
143
144 p.text(
144 p.text(
145 out + u'Out[%i:%i]: ' % (
145 out + u'Out[%i:%i]: ' % (
146 self.metadata['engine_id'], self.execution_count
146 self.metadata['engine_id'], self.execution_count
147 ) + normal + text_out
147 ) + normal + text_out
148 )
148 )
149
149
150
150
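A brief, hypothetical illustration of how ExecuteReply (defined above) behaves once a result has arrived; all values here are invented:

    # Hedged sketch: metadata keys are reachable as items and attributes,
    # and repr() summarizes the text/plain output.
    reply = ExecuteReply(
        msg_id='abc123',
        content={'execution_count': 3},
        metadata={
            'engine_id': 0,
            'execute_result': {'data': {'text/plain': '42'}, 'metadata': {}},
        },
    )
    print(repr(reply))          # <ExecuteReply[3]: 42>
    print(reply['engine_id'])   # 0, item access goes to metadata
    print(reply.engine_id)      # 0, attribute access falls back to metadata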
151 class Metadata(dict):
151 class Metadata(dict):
152 """Subclass of dict for initializing metadata values.
152 """Subclass of dict for initializing metadata values.
153
153
154 Attribute access works on keys.
154 Attribute access works on keys.
155
155
156 These objects have a strict set of keys - errors will raise if you try
156 These objects have a strict set of keys - errors will raise if you try
157 to add new keys.
157 to add new keys.
158 """
158 """
159 def __init__(self, *args, **kwargs):
159 def __init__(self, *args, **kwargs):
160 dict.__init__(self)
160 dict.__init__(self)
161 md = {'msg_id' : None,
161 md = {'msg_id' : None,
162 'submitted' : None,
162 'submitted' : None,
163 'started' : None,
163 'started' : None,
164 'completed' : None,
164 'completed' : None,
165 'received' : None,
165 'received' : None,
166 'engine_uuid' : None,
166 'engine_uuid' : None,
167 'engine_id' : None,
167 'engine_id' : None,
168 'follow' : None,
168 'follow' : None,
169 'after' : None,
169 'after' : None,
170 'status' : None,
170 'status' : None,
171
171
172 'execute_input' : None,
172 'execute_input' : None,
173 'execute_result' : None,
173 'execute_result' : None,
174 'pyerr' : None,
174 'error' : None,
175 'stdout' : '',
175 'stdout' : '',
176 'stderr' : '',
176 'stderr' : '',
177 'outputs' : [],
177 'outputs' : [],
178 'data': {},
178 'data': {},
179 'outputs_ready' : False,
179 'outputs_ready' : False,
180 }
180 }
181 self.update(md)
181 self.update(md)
182 self.update(dict(*args, **kwargs))
182 self.update(dict(*args, **kwargs))
183
183
184 def __getattr__(self, key):
184 def __getattr__(self, key):
185 """getattr aliased to getitem"""
185 """getattr aliased to getitem"""
186 if key in self:
186 if key in self:
187 return self[key]
187 return self[key]
188 else:
188 else:
189 raise AttributeError(key)
189 raise AttributeError(key)
190
190
191 def __setattr__(self, key, value):
191 def __setattr__(self, key, value):
192 """setattr aliased to setitem, with strict"""
192 """setattr aliased to setitem, with strict"""
193 if key in self:
193 if key in self:
194 self[key] = value
194 self[key] = value
195 else:
195 else:
196 raise AttributeError(key)
196 raise AttributeError(key)
197
197
198 def __setitem__(self, key, value):
198 def __setitem__(self, key, value):
199 """strict static key enforcement"""
199 """strict static key enforcement"""
200 if key in self:
200 if key in self:
201 dict.__setitem__(self, key, value)
201 dict.__setitem__(self, key, value)
202 else:
202 else:
203 raise KeyError(key)
203 raise KeyError(key)
204
204
205
205
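A short illustration of the strict-key behaviour described in the Metadata docstring above, using the class as defined here:

    # Hedged sketch: attribute access is aliased to item access, and keys
    # outside the predefined set are rejected.
    md = Metadata(status='ok')
    print(md.status)         # 'ok'
    md.stdout = 'hello\n'    # fine: 'stdout' is one of the predefined keys
    try:
        md['not_a_key'] = 1
    except KeyError as e:
        print("rejected: %s" % e)   # unknown keys raise KeyError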
206 class Client(HasTraits):
206 class Client(HasTraits):
207 """A semi-synchronous client to the IPython ZMQ cluster
207 """A semi-synchronous client to the IPython ZMQ cluster
208
208
209 Parameters
209 Parameters
210 ----------
210 ----------
211
211
212 url_file : str/unicode; path to ipcontroller-client.json
212 url_file : str/unicode; path to ipcontroller-client.json
213 This JSON file should contain all the information needed to connect to a cluster,
213 This JSON file should contain all the information needed to connect to a cluster,
214 and is likely the only argument needed.
214 and is likely the only argument needed.
215 Connection information for the Hub's registration. If a json connector
215 Connection information for the Hub's registration. If a json connector
216 file is given, then likely no further configuration is necessary.
216 file is given, then likely no further configuration is necessary.
217 [Default: use profile]
217 [Default: use profile]
218 profile : bytes
218 profile : bytes
219 The name of the Cluster profile to be used to find connector information.
219 The name of the Cluster profile to be used to find connector information.
220 If run from an IPython application, the default profile will be the same
220 If run from an IPython application, the default profile will be the same
221 as the running application, otherwise it will be 'default'.
221 as the running application, otherwise it will be 'default'.
222 cluster_id : str
222 cluster_id : str
223 String id to be added to runtime files, to prevent name collisions when using
223 String id to be added to runtime files, to prevent name collisions when using
224 multiple clusters with a single profile simultaneously.
224 multiple clusters with a single profile simultaneously.
225 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
225 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
226 Since this is text inserted into filenames, typical recommendations apply:
226 Since this is text inserted into filenames, typical recommendations apply:
227 Simple character strings are ideal, and spaces are not recommended (but
227 Simple character strings are ideal, and spaces are not recommended (but
228 should generally work)
228 should generally work)
229 context : zmq.Context
229 context : zmq.Context
230 Pass an existing zmq.Context instance, otherwise the client will create its own.
230 Pass an existing zmq.Context instance, otherwise the client will create its own.
231 debug : bool
231 debug : bool
232 flag for lots of message printing for debug purposes
232 flag for lots of message printing for debug purposes
233 timeout : int/float
233 timeout : int/float
234 time (in seconds) to wait for connection replies from the Hub
234 time (in seconds) to wait for connection replies from the Hub
235 [Default: 10]
235 [Default: 10]
236
236
237 #-------------- session related args ----------------
237 #-------------- session related args ----------------
238
238
239 config : Config object
239 config : Config object
240 If specified, this will be relayed to the Session for configuration
240 If specified, this will be relayed to the Session for configuration
241 username : str
241 username : str
242 set username for the session object
242 set username for the session object
243
243
244 #-------------- ssh related args ----------------
244 #-------------- ssh related args ----------------
245 # These are args for configuring the ssh tunnel to be used
245 # These are args for configuring the ssh tunnel to be used
246 # credentials are used to forward connections over ssh to the Controller
246 # credentials are used to forward connections over ssh to the Controller
247 # Note that the ip given in `addr` needs to be relative to sshserver
247 # Note that the ip given in `addr` needs to be relative to sshserver
248 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
248 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
249 # and set sshserver as the same machine the Controller is on. However,
249 # and set sshserver as the same machine the Controller is on. However,
250 # the only requirement is that sshserver is able to see the Controller
250 # the only requirement is that sshserver is able to see the Controller
251 # (i.e. is within the same trusted network).
251 # (i.e. is within the same trusted network).
252
252
253 sshserver : str
253 sshserver : str
254 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
254 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
255 If keyfile or password is specified, and this is not, it will default to
255 If keyfile or password is specified, and this is not, it will default to
256 the ip given in addr.
256 the ip given in addr.
257 sshkey : str; path to ssh private key file
257 sshkey : str; path to ssh private key file
258 This specifies a key to be used in ssh login, default None.
258 This specifies a key to be used in ssh login, default None.
259 Regular default ssh keys will be used without specifying this argument.
259 Regular default ssh keys will be used without specifying this argument.
260 password : str
260 password : str
261 Your ssh password to sshserver. Note that if this is left None,
261 Your ssh password to sshserver. Note that if this is left None,
262 you will be prompted for it if passwordless key based login is unavailable.
262 you will be prompted for it if passwordless key based login is unavailable.
263 paramiko : bool
263 paramiko : bool
264 flag for whether to use paramiko instead of shell ssh for tunneling.
264 flag for whether to use paramiko instead of shell ssh for tunneling.
265 [default: True on win32, False else]
265 [default: True on win32, False else]
266
266
267
267
268 Attributes
268 Attributes
269 ----------
269 ----------
270
270
271 ids : list of int engine IDs
271 ids : list of int engine IDs
272 requesting the ids attribute always synchronizes
272 requesting the ids attribute always synchronizes
273 the registration state. To request ids without synchronization,
273 the registration state. To request ids without synchronization,
274 use the semi-private _ids attribute.
274 use the semi-private _ids attribute.
275
275
276 history : list of msg_ids
276 history : list of msg_ids
277 a list of msg_ids, keeping track of all the execution
277 a list of msg_ids, keeping track of all the execution
278 messages you have submitted in order.
278 messages you have submitted in order.
279
279
280 outstanding : set of msg_ids
280 outstanding : set of msg_ids
281 a set of msg_ids that have been submitted, but whose
281 a set of msg_ids that have been submitted, but whose
282 results have not yet been received.
282 results have not yet been received.
283
283
284 results : dict
284 results : dict
285 a dict of all our results, keyed by msg_id
285 a dict of all our results, keyed by msg_id
286
286
287 block : bool
287 block : bool
288 determines default behavior when block not specified
288 determines default behavior when block not specified
289 in execution methods
289 in execution methods
290
290
291 Methods
291 Methods
292 -------
292 -------
293
293
294 spin
294 spin
295 flushes incoming results and registration state changes
295 flushes incoming results and registration state changes
296 control methods spin, and requesting `ids` also ensures up to date
296 control methods spin, and requesting `ids` also ensures up to date
297
297
298 wait
298 wait
299 wait on one or more msg_ids
299 wait on one or more msg_ids
300
300
301 execution methods
301 execution methods
302 apply
302 apply
303 legacy: execute, run
303 legacy: execute, run
304
304
305 data movement
305 data movement
306 push, pull, scatter, gather
306 push, pull, scatter, gather
307
307
308 query methods
308 query methods
309 queue_status, get_result, purge, result_status
309 queue_status, get_result, purge, result_status
310
310
311 control methods
311 control methods
312 abort, shutdown
312 abort, shutdown
313
313
314 """
314 """
315
315
316
316
317 block = Bool(False)
317 block = Bool(False)
318 outstanding = Set()
318 outstanding = Set()
319 results = Instance('collections.defaultdict', (dict,))
319 results = Instance('collections.defaultdict', (dict,))
320 metadata = Instance('collections.defaultdict', (Metadata,))
320 metadata = Instance('collections.defaultdict', (Metadata,))
321 history = List()
321 history = List()
322 debug = Bool(False)
322 debug = Bool(False)
323 _spin_thread = Any()
323 _spin_thread = Any()
324 _stop_spinning = Any()
324 _stop_spinning = Any()
325
325
326 profile=Unicode()
326 profile=Unicode()
327 def _profile_default(self):
327 def _profile_default(self):
328 if BaseIPythonApplication.initialized():
328 if BaseIPythonApplication.initialized():
329 # an IPython app *might* be running, try to get its profile
329 # an IPython app *might* be running, try to get its profile
330 try:
330 try:
331 return BaseIPythonApplication.instance().profile
331 return BaseIPythonApplication.instance().profile
332 except (AttributeError, MultipleInstanceError):
332 except (AttributeError, MultipleInstanceError):
333 # could be a *different* subclass of config.Application,
333 # could be a *different* subclass of config.Application,
334 # which would raise one of these two errors.
334 # which would raise one of these two errors.
335 return u'default'
335 return u'default'
336 else:
336 else:
337 return u'default'
337 return u'default'
338
338
339
339
340 _outstanding_dict = Instance('collections.defaultdict', (set,))
340 _outstanding_dict = Instance('collections.defaultdict', (set,))
341 _ids = List()
341 _ids = List()
342 _connected=Bool(False)
342 _connected=Bool(False)
343 _ssh=Bool(False)
343 _ssh=Bool(False)
344 _context = Instance('zmq.Context')
344 _context = Instance('zmq.Context')
345 _config = Dict()
345 _config = Dict()
346 _engines=Instance(util.ReverseDict, (), {})
346 _engines=Instance(util.ReverseDict, (), {})
347 # _hub_socket=Instance('zmq.Socket')
347 # _hub_socket=Instance('zmq.Socket')
348 _query_socket=Instance('zmq.Socket')
348 _query_socket=Instance('zmq.Socket')
349 _control_socket=Instance('zmq.Socket')
349 _control_socket=Instance('zmq.Socket')
350 _iopub_socket=Instance('zmq.Socket')
350 _iopub_socket=Instance('zmq.Socket')
351 _notification_socket=Instance('zmq.Socket')
351 _notification_socket=Instance('zmq.Socket')
352 _mux_socket=Instance('zmq.Socket')
352 _mux_socket=Instance('zmq.Socket')
353 _task_socket=Instance('zmq.Socket')
353 _task_socket=Instance('zmq.Socket')
354 _task_scheme=Unicode()
354 _task_scheme=Unicode()
355 _closed = False
355 _closed = False
356 _ignored_control_replies=Integer(0)
356 _ignored_control_replies=Integer(0)
357 _ignored_hub_replies=Integer(0)
357 _ignored_hub_replies=Integer(0)
358
358
359 def __new__(self, *args, **kw):
359 def __new__(self, *args, **kw):
360 # don't raise on positional args
360 # don't raise on positional args
361 return HasTraits.__new__(self, **kw)
361 return HasTraits.__new__(self, **kw)
362
362
363 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
363 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
364 context=None, debug=False,
364 context=None, debug=False,
365 sshserver=None, sshkey=None, password=None, paramiko=None,
365 sshserver=None, sshkey=None, password=None, paramiko=None,
366 timeout=10, cluster_id=None, **extra_args
366 timeout=10, cluster_id=None, **extra_args
367 ):
367 ):
368 if profile:
368 if profile:
369 super(Client, self).__init__(debug=debug, profile=profile)
369 super(Client, self).__init__(debug=debug, profile=profile)
370 else:
370 else:
371 super(Client, self).__init__(debug=debug)
371 super(Client, self).__init__(debug=debug)
372 if context is None:
372 if context is None:
373 context = zmq.Context.instance()
373 context = zmq.Context.instance()
374 self._context = context
374 self._context = context
375 self._stop_spinning = Event()
375 self._stop_spinning = Event()
376
376
377 if 'url_or_file' in extra_args:
377 if 'url_or_file' in extra_args:
378 url_file = extra_args['url_or_file']
378 url_file = extra_args['url_or_file']
379 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
379 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
380
380
381 if url_file and util.is_url(url_file):
381 if url_file and util.is_url(url_file):
382 raise ValueError("single urls cannot be specified, url-files must be used.")
382 raise ValueError("single urls cannot be specified, url-files must be used.")
383
383
384 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
384 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
385
385
386 if self._cd is not None:
386 if self._cd is not None:
387 if url_file is None:
387 if url_file is None:
388 if not cluster_id:
388 if not cluster_id:
389 client_json = 'ipcontroller-client.json'
389 client_json = 'ipcontroller-client.json'
390 else:
390 else:
391 client_json = 'ipcontroller-%s-client.json' % cluster_id
391 client_json = 'ipcontroller-%s-client.json' % cluster_id
392 url_file = pjoin(self._cd.security_dir, client_json)
392 url_file = pjoin(self._cd.security_dir, client_json)
393 if url_file is None:
393 if url_file is None:
394 raise ValueError(
394 raise ValueError(
395 "I can't find enough information to connect to a hub!"
395 "I can't find enough information to connect to a hub!"
396 " Please specify at least one of url_file or profile."
396 " Please specify at least one of url_file or profile."
397 )
397 )
398
398
399 with open(url_file) as f:
399 with open(url_file) as f:
400 cfg = json.load(f)
400 cfg = json.load(f)
401
401
402 self._task_scheme = cfg['task_scheme']
402 self._task_scheme = cfg['task_scheme']
403
403
404 # sync defaults from args, json:
404 # sync defaults from args, json:
405 if sshserver:
405 if sshserver:
406 cfg['ssh'] = sshserver
406 cfg['ssh'] = sshserver
407
407
408 location = cfg.setdefault('location', None)
408 location = cfg.setdefault('location', None)
409
409
410 proto,addr = cfg['interface'].split('://')
410 proto,addr = cfg['interface'].split('://')
411 addr = util.disambiguate_ip_address(addr, location)
411 addr = util.disambiguate_ip_address(addr, location)
412 cfg['interface'] = "%s://%s" % (proto, addr)
412 cfg['interface'] = "%s://%s" % (proto, addr)
413
413
414 # turn interface,port into full urls:
414 # turn interface,port into full urls:
415 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
415 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
416 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
416 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
417
417
418 url = cfg['registration']
418 url = cfg['registration']
419
419
420 if location is not None and addr == localhost():
420 if location is not None and addr == localhost():
421 # location specified, and connection is expected to be local
421 # location specified, and connection is expected to be local
422 if not is_local_ip(location) and not sshserver:
422 if not is_local_ip(location) and not sshserver:
423 # load ssh from JSON *only* if the controller is not on
423 # load ssh from JSON *only* if the controller is not on
424 # this machine
424 # this machine
425 sshserver=cfg['ssh']
425 sshserver=cfg['ssh']
426 if not is_local_ip(location) and not sshserver:
426 if not is_local_ip(location) and not sshserver:
427 # warn if no ssh specified, but SSH is probably needed
427 # warn if no ssh specified, but SSH is probably needed
428 # This is only a warning, because the most likely cause
428 # This is only a warning, because the most likely cause
429 # is a local Controller on a laptop whose IP is dynamic
429 # is a local Controller on a laptop whose IP is dynamic
430 warnings.warn("""
430 warnings.warn("""
431 Controller appears to be listening on localhost, but not on this machine.
431 Controller appears to be listening on localhost, but not on this machine.
432 If this is true, you should specify Client(...,sshserver='you@%s')
432 If this is true, you should specify Client(...,sshserver='you@%s')
433 or instruct your controller to listen on an external IP."""%location,
433 or instruct your controller to listen on an external IP."""%location,
434 RuntimeWarning)
434 RuntimeWarning)
435 elif not sshserver:
435 elif not sshserver:
436 # otherwise sync with cfg
436 # otherwise sync with cfg
437 sshserver = cfg['ssh']
437 sshserver = cfg['ssh']
438
438
439 self._config = cfg
439 self._config = cfg
440
440
441 self._ssh = bool(sshserver or sshkey or password)
441 self._ssh = bool(sshserver or sshkey or password)
442 if self._ssh and sshserver is None:
442 if self._ssh and sshserver is None:
443 # default to ssh via localhost
443 # default to ssh via localhost
444 sshserver = addr
444 sshserver = addr
445 if self._ssh and password is None:
445 if self._ssh and password is None:
446 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
446 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
447 password=False
447 password=False
448 else:
448 else:
449 password = getpass("SSH Password for %s: "%sshserver)
449 password = getpass("SSH Password for %s: "%sshserver)
450 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
450 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
451
451
452 # configure and construct the session
452 # configure and construct the session
453 try:
453 try:
454 extra_args['packer'] = cfg['pack']
454 extra_args['packer'] = cfg['pack']
455 extra_args['unpacker'] = cfg['unpack']
455 extra_args['unpacker'] = cfg['unpack']
456 extra_args['key'] = cast_bytes(cfg['key'])
456 extra_args['key'] = cast_bytes(cfg['key'])
457 extra_args['signature_scheme'] = cfg['signature_scheme']
457 extra_args['signature_scheme'] = cfg['signature_scheme']
458 except KeyError as exc:
458 except KeyError as exc:
459 msg = '\n'.join([
459 msg = '\n'.join([
460 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
460 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
461 "If you are reusing connection files, remove them and start ipcontroller again."
461 "If you are reusing connection files, remove them and start ipcontroller again."
462 ])
462 ])
463 raise ValueError(msg.format(exc.message))
463 raise ValueError(msg.format(exc.message))
464
464
465 self.session = Session(**extra_args)
465 self.session = Session(**extra_args)
466
466
467 self._query_socket = self._context.socket(zmq.DEALER)
467 self._query_socket = self._context.socket(zmq.DEALER)
468
468
469 if self._ssh:
469 if self._ssh:
470 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
470 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
471 else:
471 else:
472 self._query_socket.connect(cfg['registration'])
472 self._query_socket.connect(cfg['registration'])
473
473
474 self.session.debug = self.debug
474 self.session.debug = self.debug
475
475
476 self._notification_handlers = {'registration_notification' : self._register_engine,
476 self._notification_handlers = {'registration_notification' : self._register_engine,
477 'unregistration_notification' : self._unregister_engine,
477 'unregistration_notification' : self._unregister_engine,
478 'shutdown_notification' : lambda msg: self.close(),
478 'shutdown_notification' : lambda msg: self.close(),
479 }
479 }
480 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
480 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
481 'apply_reply' : self._handle_apply_reply}
481 'apply_reply' : self._handle_apply_reply}
482
482
483 try:
483 try:
484 self._connect(sshserver, ssh_kwargs, timeout)
484 self._connect(sshserver, ssh_kwargs, timeout)
485 except:
485 except:
486 self.close(linger=0)
486 self.close(linger=0)
487 raise
487 raise
488
488
489 # last step: setup magics, if we are in IPython:
489 # last step: setup magics, if we are in IPython:
490
490
491 try:
491 try:
492 ip = get_ipython()
492 ip = get_ipython()
493 except NameError:
493 except NameError:
494 return
494 return
495 else:
495 else:
496 if 'px' not in ip.magics_manager.magics:
496 if 'px' not in ip.magics_manager.magics:
497 # in IPython but we are the first Client.
497 # in IPython but we are the first Client.
498 # activate a default view for parallel magics.
498 # activate a default view for parallel magics.
499 self.activate()
499 self.activate()
500
500
501 def __del__(self):
501 def __del__(self):
502 """cleanup sockets, but _not_ context."""
502 """cleanup sockets, but _not_ context."""
503 self.close()
503 self.close()
504
504
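Putting the constructor arguments and attributes documented above together, a minimal usage sketch (it assumes a cluster is already running, e.g. started with `ipcluster start`):

    # Hedged sketch: connect to a running cluster and inspect it.
    from IPython.parallel import Client

    rc = Client(profile='default')   # or Client(url_file='.../ipcontroller-client.json')
    print(rc.ids)                    # currently registered engine ids, e.g. [0, 1, 2, 3]
    dview = rc[:]                    # a DirectView on all engines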
505 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
505 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
506 if ipython_dir is None:
506 if ipython_dir is None:
507 ipython_dir = get_ipython_dir()
507 ipython_dir = get_ipython_dir()
508 if profile_dir is not None:
508 if profile_dir is not None:
509 try:
509 try:
510 self._cd = ProfileDir.find_profile_dir(profile_dir)
510 self._cd = ProfileDir.find_profile_dir(profile_dir)
511 return
511 return
512 except ProfileDirError:
512 except ProfileDirError:
513 pass
513 pass
514 elif profile is not None:
514 elif profile is not None:
515 try:
515 try:
516 self._cd = ProfileDir.find_profile_dir_by_name(
516 self._cd = ProfileDir.find_profile_dir_by_name(
517 ipython_dir, profile)
517 ipython_dir, profile)
518 return
518 return
519 except ProfileDirError:
519 except ProfileDirError:
520 pass
520 pass
521 self._cd = None
521 self._cd = None
522
522
523 def _update_engines(self, engines):
523 def _update_engines(self, engines):
524 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
524 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
525 for k,v in iteritems(engines):
525 for k,v in iteritems(engines):
526 eid = int(k)
526 eid = int(k)
527 if eid not in self._engines:
527 if eid not in self._engines:
528 self._ids.append(eid)
528 self._ids.append(eid)
529 self._engines[eid] = v
529 self._engines[eid] = v
530 self._ids = sorted(self._ids)
530 self._ids = sorted(self._ids)
531 if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
531 if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
532 self._task_scheme == 'pure' and self._task_socket:
532 self._task_scheme == 'pure' and self._task_socket:
533 self._stop_scheduling_tasks()
533 self._stop_scheduling_tasks()
534
534
535 def _stop_scheduling_tasks(self):
535 def _stop_scheduling_tasks(self):
536 """Stop scheduling tasks because an engine has been unregistered
536 """Stop scheduling tasks because an engine has been unregistered
537 from a pure ZMQ scheduler.
537 from a pure ZMQ scheduler.
538 """
538 """
539 self._task_socket.close()
539 self._task_socket.close()
540 self._task_socket = None
540 self._task_socket = None
541 msg = "An engine has been unregistered, and we are using pure " +\
541 msg = "An engine has been unregistered, and we are using pure " +\
542 "ZMQ task scheduling. Task farming will be disabled."
542 "ZMQ task scheduling. Task farming will be disabled."
543 if self.outstanding:
543 if self.outstanding:
544 msg += " If you were running tasks when this happened, " +\
544 msg += " If you were running tasks when this happened, " +\
545 "some `outstanding` msg_ids may never resolve."
545 "some `outstanding` msg_ids may never resolve."
546 warnings.warn(msg, RuntimeWarning)
546 warnings.warn(msg, RuntimeWarning)
547
547
548 def _build_targets(self, targets):
548 def _build_targets(self, targets):
549 """Turn valid target IDs or 'all' into two lists:
549 """Turn valid target IDs or 'all' into two lists:
550 (uuids, int_ids).
550 (uuids, int_ids).
551 """
551 """
552 if not self._ids:
552 if not self._ids:
553 # flush notification socket if no engines yet, just in case
553 # flush notification socket if no engines yet, just in case
554 if not self.ids:
554 if not self.ids:
555 raise error.NoEnginesRegistered("Can't build targets without any engines")
555 raise error.NoEnginesRegistered("Can't build targets without any engines")
556
556
557 if targets is None:
557 if targets is None:
558 targets = self._ids
558 targets = self._ids
559 elif isinstance(targets, string_types):
559 elif isinstance(targets, string_types):
560 if targets.lower() == 'all':
560 if targets.lower() == 'all':
561 targets = self._ids
561 targets = self._ids
562 else:
562 else:
563 raise TypeError("%r not valid str target, must be 'all'"%(targets))
563 raise TypeError("%r not valid str target, must be 'all'"%(targets))
564 elif isinstance(targets, int):
564 elif isinstance(targets, int):
565 if targets < 0:
565 if targets < 0:
566 targets = self.ids[targets]
566 targets = self.ids[targets]
567 if targets not in self._ids:
567 if targets not in self._ids:
568 raise IndexError("No such engine: %i"%targets)
568 raise IndexError("No such engine: %i"%targets)
569 targets = [targets]
569 targets = [targets]
570
570
571 if isinstance(targets, slice):
571 if isinstance(targets, slice):
572 indices = list(range(len(self._ids))[targets])
572 indices = list(range(len(self._ids))[targets])
573 ids = self.ids
573 ids = self.ids
574 targets = [ ids[i] for i in indices ]
574 targets = [ ids[i] for i in indices ]
575
575
576 if not isinstance(targets, (tuple, list, xrange)):
576 if not isinstance(targets, (tuple, list, xrange)):
577 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
577 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
578
578
579 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
579 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
580
580
581 def _connect(self, sshserver, ssh_kwargs, timeout):
581 def _connect(self, sshserver, ssh_kwargs, timeout):
582 """setup all our socket connections to the cluster. This is called from
582 """setup all our socket connections to the cluster. This is called from
583 __init__."""
583 __init__."""
584
584
585 # Maybe allow reconnecting?
585 # Maybe allow reconnecting?
586 if self._connected:
586 if self._connected:
587 return
587 return
588 self._connected=True
588 self._connected=True
589
589
590 def connect_socket(s, url):
590 def connect_socket(s, url):
591 if self._ssh:
591 if self._ssh:
592 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
592 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
593 else:
593 else:
594 return s.connect(url)
594 return s.connect(url)
595
595
596 self.session.send(self._query_socket, 'connection_request')
596 self.session.send(self._query_socket, 'connection_request')
597 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
597 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
598 poller = zmq.Poller()
598 poller = zmq.Poller()
599 poller.register(self._query_socket, zmq.POLLIN)
599 poller.register(self._query_socket, zmq.POLLIN)
600 # poll expects milliseconds, timeout is seconds
600 # poll expects milliseconds, timeout is seconds
601 evts = poller.poll(timeout*1000)
601 evts = poller.poll(timeout*1000)
602 if not evts:
602 if not evts:
603 raise error.TimeoutError("Hub connection request timed out")
603 raise error.TimeoutError("Hub connection request timed out")
604 idents,msg = self.session.recv(self._query_socket,mode=0)
604 idents,msg = self.session.recv(self._query_socket,mode=0)
605 if self.debug:
605 if self.debug:
606 pprint(msg)
606 pprint(msg)
607 content = msg['content']
607 content = msg['content']
608 # self._config['registration'] = dict(content)
608 # self._config['registration'] = dict(content)
609 cfg = self._config
609 cfg = self._config
610 if content['status'] == 'ok':
610 if content['status'] == 'ok':
611 self._mux_socket = self._context.socket(zmq.DEALER)
611 self._mux_socket = self._context.socket(zmq.DEALER)
612 connect_socket(self._mux_socket, cfg['mux'])
612 connect_socket(self._mux_socket, cfg['mux'])
613
613
614 self._task_socket = self._context.socket(zmq.DEALER)
614 self._task_socket = self._context.socket(zmq.DEALER)
615 connect_socket(self._task_socket, cfg['task'])
615 connect_socket(self._task_socket, cfg['task'])
616
616
617 self._notification_socket = self._context.socket(zmq.SUB)
617 self._notification_socket = self._context.socket(zmq.SUB)
618 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
618 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
619 connect_socket(self._notification_socket, cfg['notification'])
619 connect_socket(self._notification_socket, cfg['notification'])
620
620
621 self._control_socket = self._context.socket(zmq.DEALER)
621 self._control_socket = self._context.socket(zmq.DEALER)
622 connect_socket(self._control_socket, cfg['control'])
622 connect_socket(self._control_socket, cfg['control'])
623
623
624 self._iopub_socket = self._context.socket(zmq.SUB)
624 self._iopub_socket = self._context.socket(zmq.SUB)
625 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
625 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
626 connect_socket(self._iopub_socket, cfg['iopub'])
626 connect_socket(self._iopub_socket, cfg['iopub'])
627
627
628 self._update_engines(dict(content['engines']))
628 self._update_engines(dict(content['engines']))
629 else:
629 else:
630 self._connected = False
630 self._connected = False
631 raise Exception("Failed to connect!")
631 raise Exception("Failed to connect!")
632
632
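This handshake is normally triggered by constructing a Client; a minimal sketch, assuming an ipcluster is already running (the profile name and SSH gateway below are placeholders)::

    from IPython.parallel import Client

    # plain connection, using the connection file written by the running cluster
    rc = Client(profile='default', timeout=10)

    # or tunnel every socket through an SSH gateway (placeholder address)
    rc_ssh = Client(profile='default', sshserver='user@gateway.example.com')

    print(rc.ids)   # engine ids reported by the Hub once the handshake succeeds
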
633 #--------------------------------------------------------------------------
633 #--------------------------------------------------------------------------
634 # handlers and callbacks for incoming messages
634 # handlers and callbacks for incoming messages
635 #--------------------------------------------------------------------------
635 #--------------------------------------------------------------------------
636
636
637 def _unwrap_exception(self, content):
637 def _unwrap_exception(self, content):
638 """unwrap exception, and remap engine_id to int."""
638 """unwrap exception, and remap engine_id to int."""
639 e = error.unwrap_exception(content)
639 e = error.unwrap_exception(content)
640 # print e.traceback
640 # print e.traceback
641 if e.engine_info:
641 if e.engine_info:
642 e_uuid = e.engine_info['engine_uuid']
642 e_uuid = e.engine_info['engine_uuid']
643 eid = self._engines[e_uuid]
643 eid = self._engines[e_uuid]
644 e.engine_info['engine_id'] = eid
644 e.engine_info['engine_id'] = eid
645 return e
645 return e
646
646
647 def _extract_metadata(self, msg):
647 def _extract_metadata(self, msg):
648 header = msg['header']
648 header = msg['header']
649 parent = msg['parent_header']
649 parent = msg['parent_header']
650 msg_meta = msg['metadata']
650 msg_meta = msg['metadata']
651 content = msg['content']
651 content = msg['content']
652 md = {'msg_id' : parent['msg_id'],
652 md = {'msg_id' : parent['msg_id'],
653 'received' : datetime.now(),
653 'received' : datetime.now(),
654 'engine_uuid' : msg_meta.get('engine', None),
654 'engine_uuid' : msg_meta.get('engine', None),
655 'follow' : msg_meta.get('follow', []),
655 'follow' : msg_meta.get('follow', []),
656 'after' : msg_meta.get('after', []),
656 'after' : msg_meta.get('after', []),
657 'status' : content['status'],
657 'status' : content['status'],
658 }
658 }
659
659
660 if md['engine_uuid'] is not None:
660 if md['engine_uuid'] is not None:
661 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
661 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
662
662
663 if 'date' in parent:
663 if 'date' in parent:
664 md['submitted'] = parent['date']
664 md['submitted'] = parent['date']
665 if 'started' in msg_meta:
665 if 'started' in msg_meta:
666 md['started'] = parse_date(msg_meta['started'])
666 md['started'] = parse_date(msg_meta['started'])
667 if 'date' in header:
667 if 'date' in header:
668 md['completed'] = header['date']
668 md['completed'] = header['date']
669 return md
669 return md
670
670
671 def _register_engine(self, msg):
671 def _register_engine(self, msg):
672 """Register a new engine, and update our connection info."""
672 """Register a new engine, and update our connection info."""
673 content = msg['content']
673 content = msg['content']
674 eid = content['id']
674 eid = content['id']
675 d = {eid : content['uuid']}
675 d = {eid : content['uuid']}
676 self._update_engines(d)
676 self._update_engines(d)
677
677
678 def _unregister_engine(self, msg):
678 def _unregister_engine(self, msg):
679 """Unregister an engine that has died."""
679 """Unregister an engine that has died."""
680 content = msg['content']
680 content = msg['content']
681 eid = int(content['id'])
681 eid = int(content['id'])
682 if eid in self._ids:
682 if eid in self._ids:
683 self._ids.remove(eid)
683 self._ids.remove(eid)
684 uuid = self._engines.pop(eid)
684 uuid = self._engines.pop(eid)
685
685
686 self._handle_stranded_msgs(eid, uuid)
686 self._handle_stranded_msgs(eid, uuid)
687
687
688 if self._task_socket and self._task_scheme == 'pure':
688 if self._task_socket and self._task_scheme == 'pure':
689 self._stop_scheduling_tasks()
689 self._stop_scheduling_tasks()
690
690
691 def _handle_stranded_msgs(self, eid, uuid):
691 def _handle_stranded_msgs(self, eid, uuid):
692 """Handle messages known to be on an engine when the engine unregisters.
692 """Handle messages known to be on an engine when the engine unregisters.
693
693
694 It is possible that this will fire prematurely - that is, an engine will
694 It is possible that this will fire prematurely - that is, an engine will
695 go down after completing a result, and the client will be notified
695 go down after completing a result, and the client will be notified
696 of the unregistration and later receive the successful result.
696 of the unregistration and later receive the successful result.
697 """
697 """
698
698
699 outstanding = self._outstanding_dict[uuid]
699 outstanding = self._outstanding_dict[uuid]
700
700
701 for msg_id in list(outstanding):
701 for msg_id in list(outstanding):
702 if msg_id in self.results:
702 if msg_id in self.results:
703 # we already have the result for this message
703 # we already have the result for this message
704 continue
704 continue
705 try:
705 try:
706 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
706 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
707 except:
707 except:
708 content = error.wrap_exception()
708 content = error.wrap_exception()
709 # build a fake message:
709 # build a fake message:
710 msg = self.session.msg('apply_reply', content=content)
710 msg = self.session.msg('apply_reply', content=content)
711 msg['parent_header']['msg_id'] = msg_id
711 msg['parent_header']['msg_id'] = msg_id
712 msg['metadata']['engine'] = uuid
712 msg['metadata']['engine'] = uuid
713 self._handle_apply_reply(msg)
713 self._handle_apply_reply(msg)
714
714
715 def _handle_execute_reply(self, msg):
715 def _handle_execute_reply(self, msg):
716 """Save the reply to an execute_request into our results.
716 """Save the reply to an execute_request into our results.
717
717
718 execute messages are never actually used. apply is used instead.
718 execute messages are never actually used. apply is used instead.
719 """
719 """
720
720
721 parent = msg['parent_header']
721 parent = msg['parent_header']
722 msg_id = parent['msg_id']
722 msg_id = parent['msg_id']
723 if msg_id not in self.outstanding:
723 if msg_id not in self.outstanding:
724 if msg_id in self.history:
724 if msg_id in self.history:
725 print("got stale result: %s"%msg_id)
725 print("got stale result: %s"%msg_id)
726 else:
726 else:
727 print("got unknown result: %s"%msg_id)
727 print("got unknown result: %s"%msg_id)
728 else:
728 else:
729 self.outstanding.remove(msg_id)
729 self.outstanding.remove(msg_id)
730
730
731 content = msg['content']
731 content = msg['content']
732 header = msg['header']
732 header = msg['header']
733
733
734 # construct metadata:
734 # construct metadata:
735 md = self.metadata[msg_id]
735 md = self.metadata[msg_id]
736 md.update(self._extract_metadata(msg))
736 md.update(self._extract_metadata(msg))
737 # is this redundant?
737 # is this redundant?
738 self.metadata[msg_id] = md
738 self.metadata[msg_id] = md
739
739
740 e_outstanding = self._outstanding_dict[md['engine_uuid']]
740 e_outstanding = self._outstanding_dict[md['engine_uuid']]
741 if msg_id in e_outstanding:
741 if msg_id in e_outstanding:
742 e_outstanding.remove(msg_id)
742 e_outstanding.remove(msg_id)
743
743
744 # construct result:
744 # construct result:
745 if content['status'] == 'ok':
745 if content['status'] == 'ok':
746 self.results[msg_id] = ExecuteReply(msg_id, content, md)
746 self.results[msg_id] = ExecuteReply(msg_id, content, md)
747 elif content['status'] == 'aborted':
747 elif content['status'] == 'aborted':
748 self.results[msg_id] = error.TaskAborted(msg_id)
748 self.results[msg_id] = error.TaskAborted(msg_id)
749 elif content['status'] == 'resubmitted':
749 elif content['status'] == 'resubmitted':
750 # TODO: handle resubmission
750 # TODO: handle resubmission
751 pass
751 pass
752 else:
752 else:
753 self.results[msg_id] = self._unwrap_exception(content)
753 self.results[msg_id] = self._unwrap_exception(content)
754
754
755 def _handle_apply_reply(self, msg):
755 def _handle_apply_reply(self, msg):
756 """Save the reply to an apply_request into our results."""
756 """Save the reply to an apply_request into our results."""
757 parent = msg['parent_header']
757 parent = msg['parent_header']
758 msg_id = parent['msg_id']
758 msg_id = parent['msg_id']
759 if msg_id not in self.outstanding:
759 if msg_id not in self.outstanding:
760 if msg_id in self.history:
760 if msg_id in self.history:
761 print("got stale result: %s"%msg_id)
761 print("got stale result: %s"%msg_id)
762 print(self.results[msg_id])
762 print(self.results[msg_id])
763 print(msg)
763 print(msg)
764 else:
764 else:
765 print("got unknown result: %s"%msg_id)
765 print("got unknown result: %s"%msg_id)
766 else:
766 else:
767 self.outstanding.remove(msg_id)
767 self.outstanding.remove(msg_id)
768 content = msg['content']
768 content = msg['content']
769 header = msg['header']
769 header = msg['header']
770
770
771 # construct metadata:
771 # construct metadata:
772 md = self.metadata[msg_id]
772 md = self.metadata[msg_id]
773 md.update(self._extract_metadata(msg))
773 md.update(self._extract_metadata(msg))
774 # is this redundant?
774 # is this redundant?
775 self.metadata[msg_id] = md
775 self.metadata[msg_id] = md
776
776
777 e_outstanding = self._outstanding_dict[md['engine_uuid']]
777 e_outstanding = self._outstanding_dict[md['engine_uuid']]
778 if msg_id in e_outstanding:
778 if msg_id in e_outstanding:
779 e_outstanding.remove(msg_id)
779 e_outstanding.remove(msg_id)
780
780
781 # construct result:
781 # construct result:
782 if content['status'] == 'ok':
782 if content['status'] == 'ok':
783 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
783 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
784 elif content['status'] == 'aborted':
784 elif content['status'] == 'aborted':
785 self.results[msg_id] = error.TaskAborted(msg_id)
785 self.results[msg_id] = error.TaskAborted(msg_id)
786 elif content['status'] == 'resubmitted':
786 elif content['status'] == 'resubmitted':
787 # TODO: handle resubmission
787 # TODO: handle resubmission
788 pass
788 pass
789 else:
789 else:
790 self.results[msg_id] = self._unwrap_exception(content)
790 self.results[msg_id] = self._unwrap_exception(content)
791
791
792 def _flush_notifications(self):
792 def _flush_notifications(self):
793 """Flush notifications of engine registrations waiting
793 """Flush notifications of engine registrations waiting
794 in the ZMQ queue."""
794 in the ZMQ queue."""
795 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
795 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
796 while msg is not None:
796 while msg is not None:
797 if self.debug:
797 if self.debug:
798 pprint(msg)
798 pprint(msg)
799 msg_type = msg['header']['msg_type']
799 msg_type = msg['header']['msg_type']
800 handler = self._notification_handlers.get(msg_type, None)
800 handler = self._notification_handlers.get(msg_type, None)
801 if handler is None:
801 if handler is None:
802 raise Exception("Unhandled message type: %s" % msg_type)
802 raise Exception("Unhandled message type: %s" % msg_type)
803 else:
803 else:
804 handler(msg)
804 handler(msg)
805 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
805 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
806
806
807 def _flush_results(self, sock):
807 def _flush_results(self, sock):
808 """Flush task or queue results waiting in ZMQ queue."""
808 """Flush task or queue results waiting in ZMQ queue."""
809 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
809 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
810 while msg is not None:
810 while msg is not None:
811 if self.debug:
811 if self.debug:
812 pprint(msg)
812 pprint(msg)
813 msg_type = msg['header']['msg_type']
813 msg_type = msg['header']['msg_type']
814 handler = self._queue_handlers.get(msg_type, None)
814 handler = self._queue_handlers.get(msg_type, None)
815 if handler is None:
815 if handler is None:
816 raise Exception("Unhandled message type: %s" % msg_type)
816 raise Exception("Unhandled message type: %s" % msg_type)
817 else:
817 else:
818 handler(msg)
818 handler(msg)
819 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
819 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
820
820
821 def _flush_control(self, sock):
821 def _flush_control(self, sock):
822 """Flush replies from the control channel waiting
822 """Flush replies from the control channel waiting
823 in the ZMQ queue.
823 in the ZMQ queue.
824
824
825 Currently: ignore them."""
825 Currently: ignore them."""
826 if self._ignored_control_replies <= 0:
826 if self._ignored_control_replies <= 0:
827 return
827 return
828 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
828 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
829 while msg is not None:
829 while msg is not None:
830 self._ignored_control_replies -= 1
830 self._ignored_control_replies -= 1
831 if self.debug:
831 if self.debug:
832 pprint(msg)
832 pprint(msg)
833 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
833 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
834
834
835 def _flush_ignored_control(self):
835 def _flush_ignored_control(self):
836 """flush ignored control replies"""
836 """flush ignored control replies"""
837 while self._ignored_control_replies > 0:
837 while self._ignored_control_replies > 0:
838 self.session.recv(self._control_socket)
838 self.session.recv(self._control_socket)
839 self._ignored_control_replies -= 1
839 self._ignored_control_replies -= 1
840
840
841 def _flush_ignored_hub_replies(self):
841 def _flush_ignored_hub_replies(self):
842 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
842 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
843 while msg is not None:
843 while msg is not None:
844 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
844 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
845
845
846 def _flush_iopub(self, sock):
846 def _flush_iopub(self, sock):
847 """Flush replies from the iopub channel waiting
847 """Flush replies from the iopub channel waiting
848 in the ZMQ queue.
848 in the ZMQ queue.
849 """
849 """
850 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
850 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
851 while msg is not None:
851 while msg is not None:
852 if self.debug:
852 if self.debug:
853 pprint(msg)
853 pprint(msg)
854 parent = msg['parent_header']
854 parent = msg['parent_header']
855 # ignore IOPub messages with no parent.
855 # ignore IOPub messages with no parent.
856 # Caused by print statements or warnings from before the first execution.
856 # Caused by print statements or warnings from before the first execution.
857 if not parent:
857 if not parent:
858 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
858 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
859 continue
859 continue
860 msg_id = parent['msg_id']
860 msg_id = parent['msg_id']
861 content = msg['content']
861 content = msg['content']
862 header = msg['header']
862 header = msg['header']
863 msg_type = msg['header']['msg_type']
863 msg_type = msg['header']['msg_type']
864
864
865 # init metadata:
865 # init metadata:
866 md = self.metadata[msg_id]
866 md = self.metadata[msg_id]
867
867
868 if msg_type == 'stream':
868 if msg_type == 'stream':
869 name = content['name']
869 name = content['name']
870 s = md[name] or ''
870 s = md[name] or ''
871 md[name] = s + content['data']
871 md[name] = s + content['data']
872 elif msg_type == 'pyerr':
872 elif msg_type == 'error':
873 md.update({'pyerr' : self._unwrap_exception(content)})
873 md.update({'error' : self._unwrap_exception(content)})
874 elif msg_type == 'execute_input':
874 elif msg_type == 'execute_input':
875 md.update({'execute_input' : content['code']})
875 md.update({'execute_input' : content['code']})
876 elif msg_type == 'display_data':
876 elif msg_type == 'display_data':
877 md['outputs'].append(content)
877 md['outputs'].append(content)
878 elif msg_type == 'execute_result':
878 elif msg_type == 'execute_result':
879 md['execute_result'] = content
879 md['execute_result'] = content
880 elif msg_type == 'data_message':
880 elif msg_type == 'data_message':
881 data, remainder = serialize.unserialize_object(msg['buffers'])
881 data, remainder = serialize.unserialize_object(msg['buffers'])
882 md['data'].update(data)
882 md['data'].update(data)
883 elif msg_type == 'status':
883 elif msg_type == 'status':
884 # idle message comes after all outputs
884 # idle message comes after all outputs
885 if content['execution_state'] == 'idle':
885 if content['execution_state'] == 'idle':
886 md['outputs_ready'] = True
886 md['outputs_ready'] = True
887 else:
887 else:
888 # unhandled msg_type
888 # unhandled msg_type
889 pass
889 pass
890
890
891 # redundant?
891 # redundant?
892 self.metadata[msg_id] = md
892 self.metadata[msg_id] = md
893
893
894 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
894 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
895
895
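The metadata assembled here is what later appears on AsyncResult objects; a small sketch of reading it back, assuming a running cluster (note the 'error' key, which replaces the old 'pyerr' key in this change)::

    from IPython.parallel import Client

    rc = Client()
    view = rc[:]

    def noisy_fail():
        print("about to fail")
        return 1 / 0

    ar = view.apply_async(noisy_fail)
    ar.wait()
    for md in ar.metadata:
        print(md['stdout'])       # accumulated 'stream' output
        print(md.get('error'))    # unwrapped remote exception, if any
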
896 #--------------------------------------------------------------------------
896 #--------------------------------------------------------------------------
897 # len, getitem
897 # len, getitem
898 #--------------------------------------------------------------------------
898 #--------------------------------------------------------------------------
899
899
900 def __len__(self):
900 def __len__(self):
901 """len(client) returns # of engines."""
901 """len(client) returns # of engines."""
902 return len(self.ids)
902 return len(self.ids)
903
903
904 def __getitem__(self, key):
904 def __getitem__(self, key):
905 """index access returns DirectView multiplexer objects
905 """index access returns DirectView multiplexer objects
906
906
907 Must be int, slice, or list/tuple/xrange of ints"""
907 Must be int, slice, or list/tuple/xrange of ints"""
908 if not isinstance(key, (int, slice, tuple, list, xrange)):
908 if not isinstance(key, (int, slice, tuple, list, xrange)):
909 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
909 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
910 else:
910 else:
911 return self.direct_view(key)
911 return self.direct_view(key)
912
912
913 def __iter__(self):
913 def __iter__(self):
914 """Since we define getitem, Client is iterable
914 """Since we define getitem, Client is iterable
915
915
916 but unless we also define __iter__, it won't work correctly unless engine IDs
916 but unless we also define __iter__, it won't work correctly unless engine IDs
917 start at zero and are continuous.
917 start at zero and are continuous.
918 """
918 """
919 for eid in self.ids:
919 for eid in self.ids:
920 yield self.direct_view(eid)
920 yield self.direct_view(eid)
921
921
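Index access and iteration in a short sketch, assuming a running cluster with at least two engines::

    from IPython.parallel import Client

    rc = Client()
    e0 = rc[0]        # DirectView on engine 0
    even = rc[::2]    # DirectView on every other engine

    print(e0.apply_sync(lambda: "hi"))

    for view in rc:   # one single-engine DirectView per engine id
        print(view.targets)
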
922 #--------------------------------------------------------------------------
922 #--------------------------------------------------------------------------
923 # Begin public methods
923 # Begin public methods
924 #--------------------------------------------------------------------------
924 #--------------------------------------------------------------------------
925
925
926 @property
926 @property
927 def ids(self):
927 def ids(self):
928 """Always up-to-date ids property."""
928 """Always up-to-date ids property."""
929 self._flush_notifications()
929 self._flush_notifications()
930 # always copy:
930 # always copy:
931 return list(self._ids)
931 return list(self._ids)
932
932
933 def activate(self, targets='all', suffix=''):
933 def activate(self, targets='all', suffix=''):
934 """Create a DirectView and register it with IPython magics
934 """Create a DirectView and register it with IPython magics
935
935
936 Defines the magics `%px, %autopx, %pxresult, %%px`
936 Defines the magics `%px, %autopx, %pxresult, %%px`
937
937
938 Parameters
938 Parameters
939 ----------
939 ----------
940
940
941 targets: int, list of ints, or 'all'
941 targets: int, list of ints, or 'all'
942 The engines on which the view's magics will run
942 The engines on which the view's magics will run
943 suffix: str [default: '']
943 suffix: str [default: '']
944 The suffix, if any, for the magics. This allows you to have
944 The suffix, if any, for the magics. This allows you to have
945 multiple views associated with parallel magics at the same time.
945 multiple views associated with parallel magics at the same time.
946
946
947 e.g. ``rc.activate(targets=0, suffix='0')`` will give you
947 e.g. ``rc.activate(targets=0, suffix='0')`` will give you
948 the magics ``%px0``, ``%pxresult0``, etc. for running magics just
948 the magics ``%px0``, ``%pxresult0``, etc. for running magics just
949 on engine 0.
949 on engine 0.
950 """
950 """
951 view = self.direct_view(targets)
951 view = self.direct_view(targets)
952 view.block = True
952 view.block = True
953 view.activate(suffix)
953 view.activate(suffix)
954 return view
954 return view
955
955
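For instance, inside an interactive IPython session with engines running, the registered magics can be used like this (a sketch; the engine ids are placeholders)::

    from IPython.parallel import Client

    rc = Client()
    rc.activate()                              # %px etc. target all engines
    rc.activate(targets=[0, 1], suffix='01')   # %px01 targets engines 0 and 1

    # then, at the IPython prompt:
    #   %px import os
    #   %px01 a = 5
    #   %pxresult
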
956 def close(self, linger=None):
956 def close(self, linger=None):
957 """Close my zmq Sockets
957 """Close my zmq Sockets
958
958
959 If `linger`, set the zmq LINGER socket option,
959 If `linger`, set the zmq LINGER socket option,
960 which allows discarding of messages.
960 which allows discarding of messages.
961 """
961 """
962 if self._closed:
962 if self._closed:
963 return
963 return
964 self.stop_spin_thread()
964 self.stop_spin_thread()
965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
966 for name in snames:
966 for name in snames:
967 socket = getattr(self, name)
967 socket = getattr(self, name)
968 if socket is not None and not socket.closed:
968 if socket is not None and not socket.closed:
969 if linger is not None:
969 if linger is not None:
970 socket.close(linger=linger)
970 socket.close(linger=linger)
971 else:
971 else:
972 socket.close()
972 socket.close()
973 self._closed = True
973 self._closed = True
974
974
975 def _spin_every(self, interval=1):
975 def _spin_every(self, interval=1):
976 """target func for use in spin_thread"""
976 """target func for use in spin_thread"""
977 while True:
977 while True:
978 if self._stop_spinning.is_set():
978 if self._stop_spinning.is_set():
979 return
979 return
980 time.sleep(interval)
980 time.sleep(interval)
981 self.spin()
981 self.spin()
982
982
983 def spin_thread(self, interval=1):
983 def spin_thread(self, interval=1):
984 """call Client.spin() in a background thread on some regular interval
984 """call Client.spin() in a background thread on some regular interval
985
985
986 This helps ensure that messages don't pile up too much in the zmq queue
986 This helps ensure that messages don't pile up too much in the zmq queue
987 while you are working on other things, or just leaving an idle terminal.
987 while you are working on other things, or just leaving an idle terminal.
988
988
989 It also helps limit potential padding of the `received` timestamp
989 It also helps limit potential padding of the `received` timestamp
990 on AsyncResult objects, used for timings.
990 on AsyncResult objects, used for timings.
991
991
992 Parameters
992 Parameters
993 ----------
993 ----------
994
994
995 interval : float, optional
995 interval : float, optional
996 The interval on which to spin the client in the background thread
996 The interval on which to spin the client in the background thread
997 (simply passed to time.sleep).
997 (simply passed to time.sleep).
998
998
999 Notes
999 Notes
1000 -----
1000 -----
1001
1001
1002 For precision timing, you may want to use this method to put a bound
1002 For precision timing, you may want to use this method to put a bound
1003 on the jitter (in seconds) in `received` timestamps used
1003 on the jitter (in seconds) in `received` timestamps used
1004 in AsyncResult.wall_time.
1004 in AsyncResult.wall_time.
1005
1005
1006 """
1006 """
1007 if self._spin_thread is not None:
1007 if self._spin_thread is not None:
1008 self.stop_spin_thread()
1008 self.stop_spin_thread()
1009 self._stop_spinning.clear()
1009 self._stop_spinning.clear()
1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1011 self._spin_thread.daemon = True
1011 self._spin_thread.daemon = True
1012 self._spin_thread.start()
1012 self._spin_thread.start()
1013
1013
1014 def stop_spin_thread(self):
1014 def stop_spin_thread(self):
1015 """stop background spin_thread, if any"""
1015 """stop background spin_thread, if any"""
1016 if self._spin_thread is not None:
1016 if self._spin_thread is not None:
1017 self._stop_spinning.set()
1017 self._stop_spinning.set()
1018 self._spin_thread.join()
1018 self._spin_thread.join()
1019 self._spin_thread = None
1019 self._spin_thread = None
1020
1020
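A brief sketch of the background spin thread keeping the 'received' timestamps tight during a long interactive session, assuming a running cluster::

    from IPython.parallel import Client

    rc = Client()
    rc.spin_thread(interval=0.5)    # flush the queues twice a second in the background

    lview = rc.load_balanced_view()
    ar = lview.apply_async(sum, range(10 ** 6))
    print(ar.get())
    print(ar.wall_time)             # timing based on the 'received' timestamp above

    rc.stop_spin_thread()
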
1021 def spin(self):
1021 def spin(self):
1022 """Flush any registration notifications and execution results
1022 """Flush any registration notifications and execution results
1023 waiting in the ZMQ queue.
1023 waiting in the ZMQ queue.
1024 """
1024 """
1025 if self._notification_socket:
1025 if self._notification_socket:
1026 self._flush_notifications()
1026 self._flush_notifications()
1027 if self._iopub_socket:
1027 if self._iopub_socket:
1028 self._flush_iopub(self._iopub_socket)
1028 self._flush_iopub(self._iopub_socket)
1029 if self._mux_socket:
1029 if self._mux_socket:
1030 self._flush_results(self._mux_socket)
1030 self._flush_results(self._mux_socket)
1031 if self._task_socket:
1031 if self._task_socket:
1032 self._flush_results(self._task_socket)
1032 self._flush_results(self._task_socket)
1033 if self._control_socket:
1033 if self._control_socket:
1034 self._flush_control(self._control_socket)
1034 self._flush_control(self._control_socket)
1035 if self._query_socket:
1035 if self._query_socket:
1036 self._flush_ignored_hub_replies()
1036 self._flush_ignored_hub_replies()
1037
1037
1038 def wait(self, jobs=None, timeout=-1):
1038 def wait(self, jobs=None, timeout=-1):
1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1040
1040
1041 Parameters
1041 Parameters
1042 ----------
1042 ----------
1043
1043
1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1045 ints are indices to self.history
1045 ints are indices to self.history
1046 strs are msg_ids
1046 strs are msg_ids
1047 default: wait on all outstanding messages
1047 default: wait on all outstanding messages
1048 timeout : float
1048 timeout : float
1049 a time in seconds, after which to give up.
1049 a time in seconds, after which to give up.
1050 default is -1, which means no timeout
1050 default is -1, which means no timeout
1051
1051
1052 Returns
1052 Returns
1053 -------
1053 -------
1054
1054
1055 True : when all msg_ids are done
1055 True : when all msg_ids are done
1056 False : timeout reached, some msg_ids still outstanding
1056 False : timeout reached, some msg_ids still outstanding
1057 """
1057 """
1058 tic = time.time()
1058 tic = time.time()
1059 if jobs is None:
1059 if jobs is None:
1060 theids = self.outstanding
1060 theids = self.outstanding
1061 else:
1061 else:
1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1063 jobs = [jobs]
1063 jobs = [jobs]
1064 theids = set()
1064 theids = set()
1065 for job in jobs:
1065 for job in jobs:
1066 if isinstance(job, int):
1066 if isinstance(job, int):
1067 # index access
1067 # index access
1068 job = self.history[job]
1068 job = self.history[job]
1069 elif isinstance(job, AsyncResult):
1069 elif isinstance(job, AsyncResult):
1070 theids.update(job.msg_ids)
1070 theids.update(job.msg_ids)
1071 continue
1071 continue
1072 theids.add(job)
1072 theids.add(job)
1073 if not theids.intersection(self.outstanding):
1073 if not theids.intersection(self.outstanding):
1074 return True
1074 return True
1075 self.spin()
1075 self.spin()
1076 while theids.intersection(self.outstanding):
1076 while theids.intersection(self.outstanding):
1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1078 break
1078 break
1079 time.sleep(1e-3)
1079 time.sleep(1e-3)
1080 self.spin()
1080 self.spin()
1081 return len(theids.intersection(self.outstanding)) == 0
1081 return len(theids.intersection(self.outstanding)) == 0
1082
1082
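A usage sketch for wait, assuming a running cluster::

    from IPython.parallel import Client

    rc = Client()
    view = rc[:]
    ars = [view.apply_async(pow, i, 2) for i in range(4)]

    if rc.wait(ars, timeout=5):     # True: everything finished within 5 seconds
        print([ar.get() for ar in ars])
    else:
        print("still outstanding:", len(rc.outstanding))
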
1083 #--------------------------------------------------------------------------
1083 #--------------------------------------------------------------------------
1084 # Control methods
1084 # Control methods
1085 #--------------------------------------------------------------------------
1085 #--------------------------------------------------------------------------
1086
1086
1087 @spin_first
1087 @spin_first
1088 def clear(self, targets=None, block=None):
1088 def clear(self, targets=None, block=None):
1089 """Clear the namespace in target(s)."""
1089 """Clear the namespace in target(s)."""
1090 block = self.block if block is None else block
1090 block = self.block if block is None else block
1091 targets = self._build_targets(targets)[0]
1091 targets = self._build_targets(targets)[0]
1092 for t in targets:
1092 for t in targets:
1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1094 error = False
1094 error = False
1095 if block:
1095 if block:
1096 self._flush_ignored_control()
1096 self._flush_ignored_control()
1097 for i in range(len(targets)):
1097 for i in range(len(targets)):
1098 idents,msg = self.session.recv(self._control_socket,0)
1098 idents,msg = self.session.recv(self._control_socket,0)
1099 if self.debug:
1099 if self.debug:
1100 pprint(msg)
1100 pprint(msg)
1101 if msg['content']['status'] != 'ok':
1101 if msg['content']['status'] != 'ok':
1102 error = self._unwrap_exception(msg['content'])
1102 error = self._unwrap_exception(msg['content'])
1103 else:
1103 else:
1104 self._ignored_control_replies += len(targets)
1104 self._ignored_control_replies += len(targets)
1105 if error:
1105 if error:
1106 raise error
1106 raise error
1107
1107
1108
1108
1109 @spin_first
1109 @spin_first
1110 def abort(self, jobs=None, targets=None, block=None):
1110 def abort(self, jobs=None, targets=None, block=None):
1111 """Abort specific jobs from the execution queues of target(s).
1111 """Abort specific jobs from the execution queues of target(s).
1112
1112
1113 This is a mechanism to prevent jobs that have already been submitted
1113 This is a mechanism to prevent jobs that have already been submitted
1114 from executing.
1114 from executing.
1115
1115
1116 Parameters
1116 Parameters
1117 ----------
1117 ----------
1118
1118
1119 jobs : msg_id, list of msg_ids, or AsyncResult
1119 jobs : msg_id, list of msg_ids, or AsyncResult
1120 The jobs to be aborted
1120 The jobs to be aborted
1121
1121
1122 If unspecified/None: abort all outstanding jobs.
1122 If unspecified/None: abort all outstanding jobs.
1123
1123
1124 """
1124 """
1125 block = self.block if block is None else block
1125 block = self.block if block is None else block
1126 jobs = jobs if jobs is not None else list(self.outstanding)
1126 jobs = jobs if jobs is not None else list(self.outstanding)
1127 targets = self._build_targets(targets)[0]
1127 targets = self._build_targets(targets)[0]
1128
1128
1129 msg_ids = []
1129 msg_ids = []
1130 if isinstance(jobs, string_types + (AsyncResult,)):
1130 if isinstance(jobs, string_types + (AsyncResult,)):
1131 jobs = [jobs]
1131 jobs = [jobs]
1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1133 if bad_ids:
1133 if bad_ids:
1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1135 for j in jobs:
1135 for j in jobs:
1136 if isinstance(j, AsyncResult):
1136 if isinstance(j, AsyncResult):
1137 msg_ids.extend(j.msg_ids)
1137 msg_ids.extend(j.msg_ids)
1138 else:
1138 else:
1139 msg_ids.append(j)
1139 msg_ids.append(j)
1140 content = dict(msg_ids=msg_ids)
1140 content = dict(msg_ids=msg_ids)
1141 for t in targets:
1141 for t in targets:
1142 self.session.send(self._control_socket, 'abort_request',
1142 self.session.send(self._control_socket, 'abort_request',
1143 content=content, ident=t)
1143 content=content, ident=t)
1144 error = False
1144 error = False
1145 if block:
1145 if block:
1146 self._flush_ignored_control()
1146 self._flush_ignored_control()
1147 for i in range(len(targets)):
1147 for i in range(len(targets)):
1148 idents,msg = self.session.recv(self._control_socket,0)
1148 idents,msg = self.session.recv(self._control_socket,0)
1149 if self.debug:
1149 if self.debug:
1150 pprint(msg)
1150 pprint(msg)
1151 if msg['content']['status'] != 'ok':
1151 if msg['content']['status'] != 'ok':
1152 error = self._unwrap_exception(msg['content'])
1152 error = self._unwrap_exception(msg['content'])
1153 else:
1153 else:
1154 self._ignored_control_replies += len(targets)
1154 self._ignored_control_replies += len(targets)
1155 if error:
1155 if error:
1156 raise error
1156 raise error
1157
1157
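For example, aborting work that is still queued behind a long-running task (a sketch, assuming a running cluster)::

    import time
    from IPython.parallel import Client

    rc = Client()
    view = rc[:]

    busy = view.apply_async(time.sleep, 30)    # occupies the engines
    queued = view.apply_async(time.sleep, 1)   # waits behind it in each queue

    rc.abort(jobs=queued, block=True)          # the queued job will raise TaskAborted
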
1158 @spin_first
1158 @spin_first
1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1160 """Terminates one or more engine processes, optionally including the hub.
1160 """Terminates one or more engine processes, optionally including the hub.
1161
1161
1162 Parameters
1162 Parameters
1163 ----------
1163 ----------
1164
1164
1165 targets: list of ints or 'all' [default: all]
1165 targets: list of ints or 'all' [default: all]
1166 Which engines to shutdown.
1166 Which engines to shutdown.
1167 hub: bool [default: False]
1167 hub: bool [default: False]
1168 Whether to include the Hub. hub=True implies targets='all'.
1168 Whether to include the Hub. hub=True implies targets='all'.
1169 block: bool [default: self.block]
1169 block: bool [default: self.block]
1170 Whether to wait for clean shutdown replies or not.
1170 Whether to wait for clean shutdown replies or not.
1171 restart: bool [default: False]
1171 restart: bool [default: False]
1172 NOT IMPLEMENTED
1172 NOT IMPLEMENTED
1173 whether to restart engines after shutting them down.
1173 whether to restart engines after shutting them down.
1174 """
1174 """
1175 from IPython.parallel.error import NoEnginesRegistered
1175 from IPython.parallel.error import NoEnginesRegistered
1176 if restart:
1176 if restart:
1177 raise NotImplementedError("Engine restart is not yet implemented")
1177 raise NotImplementedError("Engine restart is not yet implemented")
1178
1178
1179 block = self.block if block is None else block
1179 block = self.block if block is None else block
1180 if hub:
1180 if hub:
1181 targets = 'all'
1181 targets = 'all'
1182 try:
1182 try:
1183 targets = self._build_targets(targets)[0]
1183 targets = self._build_targets(targets)[0]
1184 except NoEnginesRegistered:
1184 except NoEnginesRegistered:
1185 targets = []
1185 targets = []
1186 for t in targets:
1186 for t in targets:
1187 self.session.send(self._control_socket, 'shutdown_request',
1187 self.session.send(self._control_socket, 'shutdown_request',
1188 content={'restart':restart},ident=t)
1188 content={'restart':restart},ident=t)
1189 error = False
1189 error = False
1190 if block or hub:
1190 if block or hub:
1191 self._flush_ignored_control()
1191 self._flush_ignored_control()
1192 for i in range(len(targets)):
1192 for i in range(len(targets)):
1193 idents,msg = self.session.recv(self._control_socket, 0)
1193 idents,msg = self.session.recv(self._control_socket, 0)
1194 if self.debug:
1194 if self.debug:
1195 pprint(msg)
1195 pprint(msg)
1196 if msg['content']['status'] != 'ok':
1196 if msg['content']['status'] != 'ok':
1197 error = self._unwrap_exception(msg['content'])
1197 error = self._unwrap_exception(msg['content'])
1198 else:
1198 else:
1199 self._ignored_control_replies += len(targets)
1199 self._ignored_control_replies += len(targets)
1200
1200
1201 if hub:
1201 if hub:
1202 time.sleep(0.25)
1202 time.sleep(0.25)
1203 self.session.send(self._query_socket, 'shutdown_request')
1203 self.session.send(self._query_socket, 'shutdown_request')
1204 idents,msg = self.session.recv(self._query_socket, 0)
1204 idents,msg = self.session.recv(self._query_socket, 0)
1205 if self.debug:
1205 if self.debug:
1206 pprint(msg)
1206 pprint(msg)
1207 if msg['content']['status'] != 'ok':
1207 if msg['content']['status'] != 'ok':
1208 error = self._unwrap_exception(msg['content'])
1208 error = self._unwrap_exception(msg['content'])
1209
1209
1210 if error:
1210 if error:
1211 raise error
1211 raise error
1212
1212
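A sketch of shutting a cluster down from the client, assuming one is running (the engine ids are placeholders)::

    from IPython.parallel import Client

    rc = Client()
    rc.shutdown(targets=[2, 3], block=True)    # stop just engines 2 and 3
    rc.shutdown(hub=True)                      # stop the remaining engines and the Hub
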
1213 #--------------------------------------------------------------------------
1213 #--------------------------------------------------------------------------
1214 # Execution related methods
1214 # Execution related methods
1215 #--------------------------------------------------------------------------
1215 #--------------------------------------------------------------------------
1216
1216
1217 def _maybe_raise(self, result):
1217 def _maybe_raise(self, result):
1218 """wrapper for maybe raising an exception if apply failed."""
1218 """wrapper for maybe raising an exception if apply failed."""
1219 if isinstance(result, error.RemoteError):
1219 if isinstance(result, error.RemoteError):
1220 raise result
1220 raise result
1221
1221
1222 return result
1222 return result
1223
1223
1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1225 ident=None):
1225 ident=None):
1226 """construct and send an apply message via a socket.
1226 """construct and send an apply message via a socket.
1227
1227
1228 This is the principal method with which all engine execution is performed by views.
1228 This is the principal method with which all engine execution is performed by views.
1229 """
1229 """
1230
1230
1231 if self._closed:
1231 if self._closed:
1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1233
1233
1234 # defaults:
1234 # defaults:
1235 args = args if args is not None else []
1235 args = args if args is not None else []
1236 kwargs = kwargs if kwargs is not None else {}
1236 kwargs = kwargs if kwargs is not None else {}
1237 metadata = metadata if metadata is not None else {}
1237 metadata = metadata if metadata is not None else {}
1238
1238
1239 # validate arguments
1239 # validate arguments
1240 if not callable(f) and not isinstance(f, Reference):
1240 if not callable(f) and not isinstance(f, Reference):
1241 raise TypeError("f must be callable, not %s"%type(f))
1241 raise TypeError("f must be callable, not %s"%type(f))
1242 if not isinstance(args, (tuple, list)):
1242 if not isinstance(args, (tuple, list)):
1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1244 if not isinstance(kwargs, dict):
1244 if not isinstance(kwargs, dict):
1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1246 if not isinstance(metadata, dict):
1246 if not isinstance(metadata, dict):
1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1248
1248
1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1250 buffer_threshold=self.session.buffer_threshold,
1250 buffer_threshold=self.session.buffer_threshold,
1251 item_threshold=self.session.item_threshold,
1251 item_threshold=self.session.item_threshold,
1252 )
1252 )
1253
1253
1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1255 metadata=metadata, track=track)
1255 metadata=metadata, track=track)
1256
1256
1257 msg_id = msg['header']['msg_id']
1257 msg_id = msg['header']['msg_id']
1258 self.outstanding.add(msg_id)
1258 self.outstanding.add(msg_id)
1259 if ident:
1259 if ident:
1260 # possibly routed to a specific engine
1260 # possibly routed to a specific engine
1261 if isinstance(ident, list):
1261 if isinstance(ident, list):
1262 ident = ident[-1]
1262 ident = ident[-1]
1263 if ident in self._engines.values():
1263 if ident in self._engines.values():
1264 # save for later, in case of engine death
1264 # save for later, in case of engine death
1265 self._outstanding_dict[ident].add(msg_id)
1265 self._outstanding_dict[ident].add(msg_id)
1266 self.history.append(msg_id)
1266 self.history.append(msg_id)
1267 self.metadata[msg_id]['submitted'] = datetime.now()
1267 self.metadata[msg_id]['submitted'] = datetime.now()
1268
1268
1269 return msg
1269 return msg
1270
1270
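Views call this under the hood; a low-level sketch of using it directly, assuming a running cluster (it reaches into the private _task_socket purely for illustration; the idiomatic route is view.apply)::

    import operator
    from IPython.parallel import Client

    rc = Client()
    msg = rc.send_apply_request(rc._task_socket, operator.add, args=(2, 3))
    msg_id = msg['header']['msg_id']

    rc.wait([msg_id])
    print(rc.results[msg_id])       # 5

    # the idiomatic equivalent:
    print(rc.load_balanced_view().apply_sync(operator.add, 2, 3))
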
1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1272 """construct and send an execute request via a socket.
1272 """construct and send an execute request via a socket.
1273
1273
1274 """
1274 """
1275
1275
1276 if self._closed:
1276 if self._closed:
1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1278
1278
1279 # defaults:
1279 # defaults:
1280 metadata = metadata if metadata is not None else {}
1280 metadata = metadata if metadata is not None else {}
1281
1281
1282 # validate arguments
1282 # validate arguments
1283 if not isinstance(code, string_types):
1283 if not isinstance(code, string_types):
1284 raise TypeError("code must be text, not %s" % type(code))
1284 raise TypeError("code must be text, not %s" % type(code))
1285 if not isinstance(metadata, dict):
1285 if not isinstance(metadata, dict):
1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1287
1287
1288 content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})
1288 content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})
1289
1289
1290
1290
1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1292 metadata=metadata)
1292 metadata=metadata)
1293
1293
1294 msg_id = msg['header']['msg_id']
1294 msg_id = msg['header']['msg_id']
1295 self.outstanding.add(msg_id)
1295 self.outstanding.add(msg_id)
1296 if ident:
1296 if ident:
1297 # possibly routed to a specific engine
1297 # possibly routed to a specific engine
1298 if isinstance(ident, list):
1298 if isinstance(ident, list):
1299 ident = ident[-1]
1299 ident = ident[-1]
1300 if ident in self._engines.values():
1300 if ident in self._engines.values():
1301 # save for later, in case of engine death
1301 # save for later, in case of engine death
1302 self._outstanding_dict[ident].add(msg_id)
1302 self._outstanding_dict[ident].add(msg_id)
1303 self.history.append(msg_id)
1303 self.history.append(msg_id)
1304 self.metadata[msg_id]['submitted'] = datetime.now()
1304 self.metadata[msg_id]['submitted'] = datetime.now()
1305
1305
1306 return msg
1306 return msg
1307
1307
1308 #--------------------------------------------------------------------------
1308 #--------------------------------------------------------------------------
1309 # construct a View object
1309 # construct a View object
1310 #--------------------------------------------------------------------------
1310 #--------------------------------------------------------------------------
1311
1311
1312 def load_balanced_view(self, targets=None):
1312 def load_balanced_view(self, targets=None):
1313 """construct a DirectView object.
1313 """construct a DirectView object.
1314
1314
1315 If no arguments are specified, create a LoadBalancedView
1315 If no arguments are specified, create a LoadBalancedView
1316 using all engines.
1316 using all engines.
1317
1317
1318 Parameters
1318 Parameters
1319 ----------
1319 ----------
1320
1320
1321 targets: list,slice,int,etc. [default: use all engines]
1321 targets: list,slice,int,etc. [default: use all engines]
1322 The subset of engines across which to load-balance
1322 The subset of engines across which to load-balance
1323 """
1323 """
1324 if targets == 'all':
1324 if targets == 'all':
1325 targets = None
1325 targets = None
1326 if targets is not None:
1326 if targets is not None:
1327 targets = self._build_targets(targets)[1]
1327 targets = self._build_targets(targets)[1]
1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1329
1329
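Typical use of the load-balanced view, assuming a running cluster::

    from IPython.parallel import Client

    rc = Client()
    lview = rc.load_balanced_view()              # balance across all engines
    sub = rc.load_balanced_view(targets=[0, 1])  # restrict to engines 0 and 1

    ar = lview.map_async(lambda x: x ** 2, range(32))
    print(ar.get())
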
1330 def direct_view(self, targets='all'):
1330 def direct_view(self, targets='all'):
1331 """construct a DirectView object.
1331 """construct a DirectView object.
1332
1332
1333 If no targets are specified, create a DirectView using all engines.
1333 If no targets are specified, create a DirectView using all engines.
1334
1334
1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1337 all *current* engines, and that list will not change.
1337 all *current* engines, and that list will not change.
1338
1338
1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1340 engines added after the DirectView is constructed.
1340 engines added after the DirectView is constructed.
1341
1341
1342 Parameters
1342 Parameters
1343 ----------
1343 ----------
1344
1344
1345 targets: list,slice,int,etc. [default: use all engines]
1345 targets: list,slice,int,etc. [default: use all engines]
1346 The engines to use for the View
1346 The engines to use for the View
1347 """
1347 """
1348 single = isinstance(targets, int)
1348 single = isinstance(targets, int)
1349 # allow 'all' to be lazily evaluated at each execution
1349 # allow 'all' to be lazily evaluated at each execution
1350 if targets != 'all':
1350 if targets != 'all':
1351 targets = self._build_targets(targets)[1]
1351 targets = self._build_targets(targets)[1]
1352 if single:
1352 if single:
1353 targets = targets[0]
1353 targets = targets[0]
1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1355
1355
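The difference between the lazily evaluated 'all' view and rc[:] in a sketch, assuming a running cluster to which engines may later be added::

    from IPython.parallel import Client

    rc = Client()
    dv_all = rc.direct_view('all')   # re-evaluates "all engines" at each execution
    dv_now = rc[:]                   # fixed to the engines registered right now

    # if new engines register later:
    #   dv_all.apply_sync(...) will include them,
    #   dv_now.apply_sync(...) will keep using only the original set.
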
1356 #--------------------------------------------------------------------------
1356 #--------------------------------------------------------------------------
1357 # Query methods
1357 # Query methods
1358 #--------------------------------------------------------------------------
1358 #--------------------------------------------------------------------------
1359
1359
1360 @spin_first
1360 @spin_first
1361 def get_result(self, indices_or_msg_ids=None, block=None):
1361 def get_result(self, indices_or_msg_ids=None, block=None):
1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1363
1363
1364 If the client already has the results, no request to the Hub will be made.
1364 If the client already has the results, no request to the Hub will be made.
1365
1365
1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1367 that include metadata about execution, and allow for awaiting results that
1367 that include metadata about execution, and allow for awaiting results that
1368 were not submitted by this Client.
1368 were not submitted by this Client.
1369
1369
1370 It can also be a convenient way to retrieve the metadata associated with
1370 It can also be a convenient way to retrieve the metadata associated with
1371 blocking execution, since it always retrieves the metadata.
1371 blocking execution, since it always retrieves the metadata.
1372
1372
1373 Examples
1373 Examples
1374 --------
1374 --------
1375 ::
1375 ::
1376
1376
1377 In [10]: r = client.apply()
1377 In [10]: r = client.apply()
1378
1378
1379 Parameters
1379 Parameters
1380 ----------
1380 ----------
1381
1381
1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1383 The indices or msg_ids of the results to be retrieved
1383 The indices or msg_ids of the results to be retrieved
1384
1384
1385 block : bool
1385 block : bool
1386 Whether to wait for the result to be done
1386 Whether to wait for the result to be done
1387
1387
1388 Returns
1388 Returns
1389 -------
1389 -------
1390
1390
1391 AsyncResult
1391 AsyncResult
1392 A single AsyncResult object will always be returned.
1392 A single AsyncResult object will always be returned.
1393
1393
1394 AsyncHubResult
1394 AsyncHubResult
1395 A subclass of AsyncResult that retrieves results from the Hub
1395 A subclass of AsyncResult that retrieves results from the Hub
1396
1396
1397 """
1397 """
1398 block = self.block if block is None else block
1398 block = self.block if block is None else block
1399 if indices_or_msg_ids is None:
1399 if indices_or_msg_ids is None:
1400 indices_or_msg_ids = -1
1400 indices_or_msg_ids = -1
1401
1401
1402 single_result = False
1402 single_result = False
1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1404 indices_or_msg_ids = [indices_or_msg_ids]
1404 indices_or_msg_ids = [indices_or_msg_ids]
1405 single_result = True
1405 single_result = True
1406
1406
1407 theids = []
1407 theids = []
1408 for id in indices_or_msg_ids:
1408 for id in indices_or_msg_ids:
1409 if isinstance(id, int):
1409 if isinstance(id, int):
1410 id = self.history[id]
1410 id = self.history[id]
1411 if not isinstance(id, string_types):
1411 if not isinstance(id, string_types):
1412 raise TypeError("indices must be str or int, not %r"%id)
1412 raise TypeError("indices must be str or int, not %r"%id)
1413 theids.append(id)
1413 theids.append(id)
1414
1414
1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1417
1417
1418 # given a single msg_id initially, get_result should return the result itself,
1418 # given a single msg_id initially, get_result should return the result itself,
1419 # not a length-one list
1419 # not a length-one list
1420 if single_result:
1420 if single_result:
1421 theids = theids[0]
1421 theids = theids[0]
1422
1422
1423 if remote_ids:
1423 if remote_ids:
1424 ar = AsyncHubResult(self, msg_ids=theids)
1424 ar = AsyncHubResult(self, msg_ids=theids)
1425 else:
1425 else:
1426 ar = AsyncResult(self, msg_ids=theids)
1426 ar = AsyncResult(self, msg_ids=theids)
1427
1427
1428 if block:
1428 if block:
1429 ar.wait()
1429 ar.wait()
1430
1430
1431 return ar
1431 return ar
1432
1432
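A sketch of pulling results and metadata back by history index or msg_id, assuming a running cluster::

    from IPython.parallel import Client

    rc = Client()
    view = rc[:]
    view.execute('a = 10', block=True)

    ar = rc.get_result(-1)           # AsyncResult for the most recent request
    print(ar.metadata)

    # the same request can be fetched later, even by another Client, via its msg_ids
    again = rc.get_result(ar.msg_ids, block=True)
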
1433 @spin_first
1433 @spin_first
1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1435 """Resubmit one or more tasks.
1435 """Resubmit one or more tasks.
1436
1436
1437 in-flight tasks may not be resubmitted.
1437 in-flight tasks may not be resubmitted.
1438
1438
1439 Parameters
1439 Parameters
1440 ----------
1440 ----------
1441
1441
1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1443 The indices or msg_ids of the tasks to be resubmitted
1443 The indices or msg_ids of the tasks to be resubmitted
1444
1444
1445 block : bool
1445 block : bool
1446 Whether to wait for the result to be done
1446 Whether to wait for the result to be done
1447
1447
1448 Returns
1448 Returns
1449 -------
1449 -------
1450
1450
1451 AsyncHubResult
1451 AsyncHubResult
1452 A subclass of AsyncResult that retrieves results from the Hub
1452 A subclass of AsyncResult that retrieves results from the Hub
1453
1453
1454 """
1454 """
1455 block = self.block if block is None else block
1455 block = self.block if block is None else block
1456 if indices_or_msg_ids is None:
1456 if indices_or_msg_ids is None:
1457 indices_or_msg_ids = -1
1457 indices_or_msg_ids = -1
1458
1458
1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1460 indices_or_msg_ids = [indices_or_msg_ids]
1460 indices_or_msg_ids = [indices_or_msg_ids]
1461
1461
1462 theids = []
1462 theids = []
1463 for id in indices_or_msg_ids:
1463 for id in indices_or_msg_ids:
1464 if isinstance(id, int):
1464 if isinstance(id, int):
1465 id = self.history[id]
1465 id = self.history[id]
1466 if not isinstance(id, string_types):
1466 if not isinstance(id, string_types):
1467 raise TypeError("indices must be str or int, not %r"%id)
1467 raise TypeError("indices must be str or int, not %r"%id)
1468 theids.append(id)
1468 theids.append(id)
1469
1469
1470 content = dict(msg_ids = theids)
1470 content = dict(msg_ids = theids)
1471
1471
1472 self.session.send(self._query_socket, 'resubmit_request', content)
1472 self.session.send(self._query_socket, 'resubmit_request', content)
1473
1473
1474 zmq.select([self._query_socket], [], [])
1474 zmq.select([self._query_socket], [], [])
1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1476 if self.debug:
1476 if self.debug:
1477 pprint(msg)
1477 pprint(msg)
1478 content = msg['content']
1478 content = msg['content']
1479 if content['status'] != 'ok':
1479 if content['status'] != 'ok':
1480 raise self._unwrap_exception(content)
1480 raise self._unwrap_exception(content)
1481 mapping = content['resubmitted']
1481 mapping = content['resubmitted']
1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1483
1483
1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1485
1485
1486 if block:
1486 if block:
1487 ar.wait()
1487 ar.wait()
1488
1488
1489 return ar
1489 return ar
1490
1490
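A sketch of resubmitting a finished task, assuming a running cluster and that the task went through the task scheduler (which is what the Hub can resubmit)::

    import random
    from IPython.parallel import Client

    rc = Client()
    lview = rc.load_balanced_view()

    ar = lview.apply_async(random.random)
    ar.wait()

    ar2 = rc.resubmit(ar.msg_ids, block=True)    # run the same task again
    print(ar.get(), ar2.get())
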
1491 @spin_first
1491 @spin_first
1492 def result_status(self, msg_ids, status_only=True):
1492 def result_status(self, msg_ids, status_only=True):
1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1494
1494
1495 If status_only is False, then the actual results will be retrieved, else
1495 If status_only is False, then the actual results will be retrieved, else
1496 only the status of the results will be checked.
1496 only the status of the results will be checked.
1497
1497
1498 Parameters
1498 Parameters
1499 ----------
1499 ----------
1500
1500
1501 msg_ids : list of msg_ids
1501 msg_ids : list of msg_ids
1502 if int:
1502 if int:
1503 Passed as index to self.history for convenience.
1503 Passed as index to self.history for convenience.
1504 status_only : bool (default: True)
1504 status_only : bool (default: True)
1505 if False:
1505 if False:
1506 Retrieve the actual results of completed tasks.
1506 Retrieve the actual results of completed tasks.
1507
1507
1508 Returns
1508 Returns
1509 -------
1509 -------
1510
1510
1511 results : dict
1511 results : dict
1512 There will always be the keys 'pending' and 'completed', which will
1512 There will always be the keys 'pending' and 'completed', which will
1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1514 is False, then completed results will be keyed by their `msg_id`.
1514 is False, then completed results will be keyed by their `msg_id`.
1515 """
1515 """
1516 if not isinstance(msg_ids, (list,tuple)):
1516 if not isinstance(msg_ids, (list,tuple)):
1517 msg_ids = [msg_ids]
1517 msg_ids = [msg_ids]
1518
1518
1519 theids = []
1519 theids = []
1520 for msg_id in msg_ids:
1520 for msg_id in msg_ids:
1521 if isinstance(msg_id, int):
1521 if isinstance(msg_id, int):
1522 msg_id = self.history[msg_id]
1522 msg_id = self.history[msg_id]
1523 if not isinstance(msg_id, string_types):
1523 if not isinstance(msg_id, string_types):
1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1525 theids.append(msg_id)
1525 theids.append(msg_id)
1526
1526
1527 completed = []
1527 completed = []
1528 local_results = {}
1528 local_results = {}
1529
1529
1530 # comment this block out to temporarily disable local shortcut:
1530 # comment this block out to temporarily disable local shortcut:
1531 for msg_id in theids:
1531 for msg_id in theids:
1532 if msg_id in self.results:
1532 if msg_id in self.results:
1533 completed.append(msg_id)
1533 completed.append(msg_id)
1534 local_results[msg_id] = self.results[msg_id]
1534 local_results[msg_id] = self.results[msg_id]
1535 theids.remove(msg_id)
1535 theids.remove(msg_id)
1536
1536
1537 if theids: # some not locally cached
1537 if theids: # some not locally cached
1538 content = dict(msg_ids=theids, status_only=status_only)
1538 content = dict(msg_ids=theids, status_only=status_only)
1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1540 zmq.select([self._query_socket], [], [])
1540 zmq.select([self._query_socket], [], [])
1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1542 if self.debug:
1542 if self.debug:
1543 pprint(msg)
1543 pprint(msg)
1544 content = msg['content']
1544 content = msg['content']
1545 if content['status'] != 'ok':
1545 if content['status'] != 'ok':
1546 raise self._unwrap_exception(content)
1546 raise self._unwrap_exception(content)
1547 buffers = msg['buffers']
1547 buffers = msg['buffers']
1548 else:
1548 else:
1549 content = dict(completed=[],pending=[])
1549 content = dict(completed=[],pending=[])
1550
1550
1551 content['completed'].extend(completed)
1551 content['completed'].extend(completed)
1552
1552
1553 if status_only:
1553 if status_only:
1554 return content
1554 return content
1555
1555
1556 failures = []
1556 failures = []
1557 # load cached results into result:
1557 # load cached results into result:
1558 content.update(local_results)
1558 content.update(local_results)
1559
1559
1560 # update cache with results:
1560 # update cache with results:
1561 for msg_id in sorted(theids):
1561 for msg_id in sorted(theids):
1562 if msg_id in content['completed']:
1562 if msg_id in content['completed']:
1563 rec = content[msg_id]
1563 rec = content[msg_id]
1564 parent = extract_dates(rec['header'])
1564 parent = extract_dates(rec['header'])
1565 header = extract_dates(rec['result_header'])
1565 header = extract_dates(rec['result_header'])
1566 rcontent = rec['result_content']
1566 rcontent = rec['result_content']
1567 iodict = rec['io']
1567 iodict = rec['io']
1568 if isinstance(rcontent, str):
1568 if isinstance(rcontent, str):
1569 rcontent = self.session.unpack(rcontent)
1569 rcontent = self.session.unpack(rcontent)
1570
1570
1571 md = self.metadata[msg_id]
1571 md = self.metadata[msg_id]
1572 md_msg = dict(
1572 md_msg = dict(
1573 content=rcontent,
1573 content=rcontent,
1574 parent_header=parent,
1574 parent_header=parent,
1575 header=header,
1575 header=header,
1576 metadata=rec['result_metadata'],
1576 metadata=rec['result_metadata'],
1577 )
1577 )
1578 md.update(self._extract_metadata(md_msg))
1578 md.update(self._extract_metadata(md_msg))
1579 if rec.get('received'):
1579 if rec.get('received'):
1580 md['received'] = parse_date(rec['received'])
1580 md['received'] = parse_date(rec['received'])
1581 md.update(iodict)
1581 md.update(iodict)
1582
1582
1583 if rcontent['status'] == 'ok':
1583 if rcontent['status'] == 'ok':
1584 if header['msg_type'] == 'apply_reply':
1584 if header['msg_type'] == 'apply_reply':
1585 res,buffers = serialize.unserialize_object(buffers)
1585 res,buffers = serialize.unserialize_object(buffers)
1586 elif header['msg_type'] == 'execute_reply':
1586 elif header['msg_type'] == 'execute_reply':
1587 res = ExecuteReply(msg_id, rcontent, md)
1587 res = ExecuteReply(msg_id, rcontent, md)
1588 else:
1588 else:
1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1590 else:
1590 else:
1591 res = self._unwrap_exception(rcontent)
1591 res = self._unwrap_exception(rcontent)
1592 failures.append(res)
1592 failures.append(res)
1593
1593
1594 self.results[msg_id] = res
1594 self.results[msg_id] = res
1595 content[msg_id] = res
1595 content[msg_id] = res
1596
1596
1597 if len(theids) == 1 and failures:
1597 if len(theids) == 1 and failures:
1598 raise failures[0]
1598 raise failures[0]
1599
1599
1600 error.collect_exceptions(failures, "result_status")
1600 error.collect_exceptions(failures, "result_status")
1601 return content
1601 return content
1602
1602
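# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Checking on tasks by msg_id without blocking. Assumes `rc` is a connected
# Client and `msg_ids` is a list of ids from an earlier submission.
status = rc.result_status(msg_ids, status_only=True)
print(status['pending'])      # msg_ids that are still outstanding
print(status['completed'])    # msg_ids whose results are available

# With status_only=False the completed results are fetched and keyed by msg_id:
full = rc.result_status(msg_ids, status_only=False)
for mid in full['completed']:
    print(mid, full[mid])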
1603 @spin_first
1603 @spin_first
1604 def queue_status(self, targets='all', verbose=False):
1604 def queue_status(self, targets='all', verbose=False):
1605 """Fetch the status of engine queues.
1605 """Fetch the status of engine queues.
1606
1606
1607 Parameters
1607 Parameters
1608 ----------
1608 ----------
1609
1609
1610 targets : int/str/list of ints/strs
1610 targets : int/str/list of ints/strs
1611 the engines whose states are to be queried.
1611 the engines whose states are to be queried.
1612 default : all
1612 default : all
1613 verbose : bool
1613 verbose : bool
1614 Whether to return lengths only, or lists of ids for each element
1614 Whether to return lengths only, or lists of ids for each element
1615 """
1615 """
1616 if targets == 'all':
1616 if targets == 'all':
1617 # allow 'all' to be evaluated on the engine
1617 # allow 'all' to be evaluated on the engine
1618 engine_ids = None
1618 engine_ids = None
1619 else:
1619 else:
1620 engine_ids = self._build_targets(targets)[1]
1620 engine_ids = self._build_targets(targets)[1]
1621 content = dict(targets=engine_ids, verbose=verbose)
1621 content = dict(targets=engine_ids, verbose=verbose)
1622 self.session.send(self._query_socket, "queue_request", content=content)
1622 self.session.send(self._query_socket, "queue_request", content=content)
1623 idents,msg = self.session.recv(self._query_socket, 0)
1623 idents,msg = self.session.recv(self._query_socket, 0)
1624 if self.debug:
1624 if self.debug:
1625 pprint(msg)
1625 pprint(msg)
1626 content = msg['content']
1626 content = msg['content']
1627 status = content.pop('status')
1627 status = content.pop('status')
1628 if status != 'ok':
1628 if status != 'ok':
1629 raise self._unwrap_exception(content)
1629 raise self._unwrap_exception(content)
1630 content = rekey(content)
1630 content = rekey(content)
1631 if isinstance(targets, int):
1631 if isinstance(targets, int):
1632 return content[targets]
1632 return content[targets]
1633 else:
1633 else:
1634 return content
1634 return content
1635
1635
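# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Inspecting engine queue lengths. Assumes `rc` is a connected Client with at
# least one engine registered.
print(rc.queue_status())              # per-engine dict of queue/task/completed counts
print(rc.queue_status(targets=0))     # an int target returns just that engine's entry
print(rc.queue_status(verbose=True))  # lists of msg_ids instead of counts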
1636 def _build_msgids_from_target(self, targets=None):
1636 def _build_msgids_from_target(self, targets=None):
1637 """Build a list of msg_ids from the list of engine targets"""
1637 """Build a list of msg_ids from the list of engine targets"""
1638 if not targets: # needed as _build_targets otherwise uses all engines
1638 if not targets: # needed as _build_targets otherwise uses all engines
1639 return []
1639 return []
1640 target_ids = self._build_targets(targets)[0]
1640 target_ids = self._build_targets(targets)[0]
1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1642
1642
1643 def _build_msgids_from_jobs(self, jobs=None):
1643 def _build_msgids_from_jobs(self, jobs=None):
1644 """Build a list of msg_ids from "jobs" """
1644 """Build a list of msg_ids from "jobs" """
1645 if not jobs:
1645 if not jobs:
1646 return []
1646 return []
1647 msg_ids = []
1647 msg_ids = []
1648 if isinstance(jobs, string_types + (AsyncResult,)):
1648 if isinstance(jobs, string_types + (AsyncResult,)):
1649 jobs = [jobs]
1649 jobs = [jobs]
1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1651 if bad_ids:
1651 if bad_ids:
1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1653 for j in jobs:
1653 for j in jobs:
1654 if isinstance(j, AsyncResult):
1654 if isinstance(j, AsyncResult):
1655 msg_ids.extend(j.msg_ids)
1655 msg_ids.extend(j.msg_ids)
1656 else:
1656 else:
1657 msg_ids.append(j)
1657 msg_ids.append(j)
1658 return msg_ids
1658 return msg_ids
1659
1659
1660 def purge_local_results(self, jobs=[], targets=[]):
1660 def purge_local_results(self, jobs=[], targets=[]):
1661 """Clears the client caches of results and their metadata.
1661 """Clears the client caches of results and their metadata.
1662
1662
1663 Individual results can be purged by msg_id, or the entire
1663 Individual results can be purged by msg_id, or the entire
1664 history of specific targets can be purged.
1664 history of specific targets can be purged.
1665
1665
1666 Use `purge_local_results('all')` to scrub everything from the Client's
1666 Use `purge_local_results('all')` to scrub everything from the Client's
1667 results and metadata caches.
1667 results and metadata caches.
1668
1668
1669 After this call all `AsyncResults` are invalid and should be discarded.
1669 After this call all `AsyncResults` are invalid and should be discarded.
1670
1670
1671 If you must "reget" the results, you can still do so by using
1671 If you must "reget" the results, you can still do so by using
1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1673 redownload the results from the hub if they are still available
1673 redownload the results from the hub if they are still available
1674 (i.e. `client.purge_hub_results(...)` has not been called).
1674 (i.e. `client.purge_hub_results(...)` has not been called).
1675
1675
1676 Parameters
1676 Parameters
1677 ----------
1677 ----------
1678
1678
1679 jobs : str or list of str or AsyncResult objects
1679 jobs : str or list of str or AsyncResult objects
1680 the msg_ids whose results should be purged.
1680 the msg_ids whose results should be purged.
1681 targets : int/list of ints
1681 targets : int/list of ints
1682 The engines, by integer ID, whose entire result histories are to be purged.
1682 The engines, by integer ID, whose entire result histories are to be purged.
1683
1683
1684 Raises
1684 Raises
1685 ------
1685 ------
1686
1686
1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1688
1688
1689 """
1689 """
1690 if not targets and not jobs:
1690 if not targets and not jobs:
1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1692
1692
1693 if jobs == 'all':
1693 if jobs == 'all':
1694 if self.outstanding:
1694 if self.outstanding:
1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1696 self.results.clear()
1696 self.results.clear()
1697 self.metadata.clear()
1697 self.metadata.clear()
1698 else:
1698 else:
1699 msg_ids = set()
1699 msg_ids = set()
1700 msg_ids.update(self._build_msgids_from_target(targets))
1700 msg_ids.update(self._build_msgids_from_target(targets))
1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1702 still_outstanding = self.outstanding.intersection(msg_ids)
1702 still_outstanding = self.outstanding.intersection(msg_ids)
1703 if still_outstanding:
1703 if still_outstanding:
1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1705 for mid in msg_ids:
1705 for mid in msg_ids:
1706 self.results.pop(mid)
1706 self.results.pop(mid)
1707 self.metadata.pop(mid)
1707 self.metadata.pop(mid)
1708
1708
1709
1709
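# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Freeing client-side memory once results have been consumed. Assumes `rc` is a
# Client and `ar` is a completed AsyncResult.
rc.purge_local_results(jobs=ar)         # drop just these results and their metadata
rc.purge_local_results(targets=[0, 1])  # or everything that ran on engines 0 and 1
rc.purge_local_results('all')           # or the entire local cache (nothing may be outstanding)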
1710 @spin_first
1710 @spin_first
1711 def purge_hub_results(self, jobs=[], targets=[]):
1711 def purge_hub_results(self, jobs=[], targets=[]):
1712 """Tell the Hub to forget results.
1712 """Tell the Hub to forget results.
1713
1713
1714 Individual results can be purged by msg_id, or the entire
1714 Individual results can be purged by msg_id, or the entire
1715 history of specific targets can be purged.
1715 history of specific targets can be purged.
1716
1716
1717 Use `purge_results('all')` to scrub everything from the Hub's db.
1717 Use `purge_results('all')` to scrub everything from the Hub's db.
1718
1718
1719 Parameters
1719 Parameters
1720 ----------
1720 ----------
1721
1721
1722 jobs : str or list of str or AsyncResult objects
1722 jobs : str or list of str or AsyncResult objects
1723 the msg_ids whose results should be forgotten.
1723 the msg_ids whose results should be forgotten.
1724 targets : int/str/list of ints/strs
1724 targets : int/str/list of ints/strs
1725 The targets, by int_id, whose entire history is to be purged.
1725 The targets, by int_id, whose entire history is to be purged.
1726
1726
1727 default : None
1727 default : None
1728 """
1728 """
1729 if not targets and not jobs:
1729 if not targets and not jobs:
1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1731 if targets:
1731 if targets:
1732 targets = self._build_targets(targets)[1]
1732 targets = self._build_targets(targets)[1]
1733
1733
1734 # construct msg_ids from jobs
1734 # construct msg_ids from jobs
1735 if jobs == 'all':
1735 if jobs == 'all':
1736 msg_ids = jobs
1736 msg_ids = jobs
1737 else:
1737 else:
1738 msg_ids = self._build_msgids_from_jobs(jobs)
1738 msg_ids = self._build_msgids_from_jobs(jobs)
1739
1739
1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1741 self.session.send(self._query_socket, "purge_request", content=content)
1741 self.session.send(self._query_socket, "purge_request", content=content)
1742 idents, msg = self.session.recv(self._query_socket, 0)
1742 idents, msg = self.session.recv(self._query_socket, 0)
1743 if self.debug:
1743 if self.debug:
1744 pprint(msg)
1744 pprint(msg)
1745 content = msg['content']
1745 content = msg['content']
1746 if content['status'] != 'ok':
1746 if content['status'] != 'ok':
1747 raise self._unwrap_exception(content)
1747 raise self._unwrap_exception(content)
1748
1748
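# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Bounding the controller's memory/db growth by forgetting stored results on the
# Hub. Assumes `rc` is a Client and `ar` is a completed AsyncResult.
rc.purge_hub_results(jobs=ar.msg_ids)   # forget specific tasks on the Hub
rc.purge_hub_results(targets=[0])       # forget everything engine 0 has run
rc.purge_hub_results('all')             # scrub every stored result from the Hub's db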
1749 def purge_results(self, jobs=[], targets=[]):
1749 def purge_results(self, jobs=[], targets=[]):
1750 """Clears the cached results from both the hub and the local client
1750 """Clears the cached results from both the hub and the local client
1751
1751
1752 Individual results can be purged by msg_id, or the entire
1752 Individual results can be purged by msg_id, or the entire
1753 history of specific targets can be purged.
1753 history of specific targets can be purged.
1754
1754
1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1756 the Client's db.
1756 the Client's db.
1757
1757
1758 Equivalent to calling both `purge_hub_results()` and `purge_client_results()` with
1758 Equivalent to calling both `purge_hub_results()` and `purge_client_results()` with
1759 the same arguments.
1759 the same arguments.
1760
1760
1761 Parameters
1761 Parameters
1762 ----------
1762 ----------
1763
1763
1764 jobs : str or list of str or AsyncResult objects
1764 jobs : str or list of str or AsyncResult objects
1765 the msg_ids whose results should be forgotten.
1765 the msg_ids whose results should be forgotten.
1766 targets : int/str/list of ints/strs
1766 targets : int/str/list of ints/strs
1767 The targets, by int_id, whose entire history is to be purged.
1767 The targets, by int_id, whose entire history is to be purged.
1768
1768
1769 default : None
1769 default : None
1770 """
1770 """
1771 self.purge_local_results(jobs=jobs, targets=targets)
1771 self.purge_local_results(jobs=jobs, targets=targets)
1772 self.purge_hub_results(jobs=jobs, targets=targets)
1772 self.purge_hub_results(jobs=jobs, targets=targets)
1773
1773
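# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# The combined form: clear a finished task from both caches in one call.
# Assumes `rc` is a Client and `ar` is a completed AsyncResult.
rc.purge_results(jobs=ar)   # same as purge_local_results(...) followed by purge_hub_results(...)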
1774 def purge_everything(self):
1774 def purge_everything(self):
1775 """Clears all content from previous Tasks from both the hub and the local client
1775 """Clears all content from previous Tasks from both the hub and the local client
1776
1776
1777 In addition to calling `purge_results("all")` it also deletes the history and
1777 In addition to calling `purge_results("all")` it also deletes the history and
1778 other bookkeeping lists.
1778 other bookkeeping lists.
1779 """
1779 """
1780 self.purge_results("all")
1780 self.purge_results("all")
1781 self.history = []
1781 self.history = []
1782 self.session.digest_history.clear()
1782 self.session.digest_history.clear()
1783
1783
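# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Full reset of cached task state; valid only when no tasks are outstanding.
# Assumes `rc` is a connected Client.
rc.purge_everything()
assert rc.history == [] and not rc.results and not rc.metadata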
1784 @spin_first
1784 @spin_first
1785 def hub_history(self):
1785 def hub_history(self):
1786 """Get the Hub's history
1786 """Get the Hub's history
1787
1787
1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1789 This will contain the history of all clients, and, depending on configuration,
1789 This will contain the history of all clients, and, depending on configuration,
1790 may contain history across multiple cluster sessions.
1790 may contain history across multiple cluster sessions.
1791
1791
1792 Any msg_id returned here is a valid argument to `get_result`.
1792 Any msg_id returned here is a valid argument to `get_result`.
1793
1793
1794 Returns
1794 Returns
1795 -------
1795 -------
1796
1796
1797 msg_ids : list of strs
1797 msg_ids : list of strs
1798 list of all msg_ids, ordered by task submission time.
1798 list of all msg_ids, ordered by task submission time.
1799 """
1799 """
1800
1800
1801 self.session.send(self._query_socket, "history_request", content={})
1801 self.session.send(self._query_socket, "history_request", content={})
1802 idents, msg = self.session.recv(self._query_socket, 0)
1802 idents, msg = self.session.recv(self._query_socket, 0)
1803
1803
1804 if self.debug:
1804 if self.debug:
1805 pprint(msg)
1805 pprint(msg)
1806 content = msg['content']
1806 content = msg['content']
1807 if content['status'] != 'ok':
1807 if content['status'] != 'ok':
1808 raise self._unwrap_exception(content)
1808 raise self._unwrap_exception(content)
1809 else:
1809 else:
1810 return content['history']
1810 return content['history']
1811
1811
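# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Listing every msg_id the Hub remembers and pulling one result back down.
# Assumes `rc` is a Client and the controller keeps task history.
msg_ids = rc.hub_history()
if msg_ids:
    ar = rc.get_result(msg_ids[-1])   # any msg_id returned here is valid for get_result
    print(ar.get())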
1812 @spin_first
1812 @spin_first
1813 def db_query(self, query, keys=None):
1813 def db_query(self, query, keys=None):
1814 """Query the Hub's TaskRecord database
1814 """Query the Hub's TaskRecord database
1815
1815
1816 This will return a list of task record dicts that match `query`
1816 This will return a list of task record dicts that match `query`
1817
1817
1818 Parameters
1818 Parameters
1819 ----------
1819 ----------
1820
1820
1821 query : mongodb query dict
1821 query : mongodb query dict
1822 The search dict. See mongodb query docs for details.
1822 The search dict. See mongodb query docs for details.
1823 keys : list of strs [optional]
1823 keys : list of strs [optional]
1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1825 'msg_id' will *always* be included.
1825 'msg_id' will *always* be included.
1826 """
1826 """
1827 if isinstance(keys, string_types):
1827 if isinstance(keys, string_types):
1828 keys = [keys]
1828 keys = [keys]
1829 content = dict(query=query, keys=keys)
1829 content = dict(query=query, keys=keys)
1830 self.session.send(self._query_socket, "db_request", content=content)
1830 self.session.send(self._query_socket, "db_request", content=content)
1831 idents, msg = self.session.recv(self._query_socket, 0)
1831 idents, msg = self.session.recv(self._query_socket, 0)
1832 if self.debug:
1832 if self.debug:
1833 pprint(msg)
1833 pprint(msg)
1834 content = msg['content']
1834 content = msg['content']
1835 if content['status'] != 'ok':
1835 if content['status'] != 'ok':
1836 raise self._unwrap_exception(content)
1836 raise self._unwrap_exception(content)
1837
1837
1838 records = content['records']
1838 records = content['records']
1839
1839
1840 buffer_lens = content['buffer_lens']
1840 buffer_lens = content['buffer_lens']
1841 result_buffer_lens = content['result_buffer_lens']
1841 result_buffer_lens = content['result_buffer_lens']
1842 buffers = msg['buffers']
1842 buffers = msg['buffers']
1843 has_bufs = buffer_lens is not None
1843 has_bufs = buffer_lens is not None
1844 has_rbufs = result_buffer_lens is not None
1844 has_rbufs = result_buffer_lens is not None
1845 for i,rec in enumerate(records):
1845 for i,rec in enumerate(records):
1846 # unpack datetime objects
1846 # unpack datetime objects
1847 for hkey in ('header', 'result_header'):
1847 for hkey in ('header', 'result_header'):
1848 if hkey in rec:
1848 if hkey in rec:
1849 rec[hkey] = extract_dates(rec[hkey])
1849 rec[hkey] = extract_dates(rec[hkey])
1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1851 if dtkey in rec:
1851 if dtkey in rec:
1852 rec[dtkey] = parse_date(rec[dtkey])
1852 rec[dtkey] = parse_date(rec[dtkey])
1853 # relink buffers
1853 # relink buffers
1854 if has_bufs:
1854 if has_bufs:
1855 blen = buffer_lens[i]
1855 blen = buffer_lens[i]
1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1857 if has_rbufs:
1857 if has_rbufs:
1858 blen = result_buffer_lens[i]
1858 blen = result_buffer_lens[i]
1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1860
1860
1861 return records
1861 return records
1862
1862
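# --- Illustrative usage sketch (editor's addition, not part of the diff above) ---
# Querying the Hub's TaskRecord database with a MongoDB-style filter. Assumes
# `rc` is a Client and the controller uses a real db backend (not NoDB).
from datetime import datetime, timedelta

recent = rc.db_query(
    {'submitted': {'$gt': datetime.now() - timedelta(hours=1)}},
    keys=['msg_id', 'completed', 'engine_uuid'],
)
for rec in recent:
    print(rec['msg_id'], rec.get('completed'))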
1863 __all__ = [ 'Client' ]
1863 __all__ = [ 'Client' ]
@@ -1,1440 +1,1440 b''
1 """The IPython Controller Hub with 0MQ
1 """The IPython Controller Hub with 0MQ
2
2
3 This is the master object that handles connections from engines and clients,
3 This is the master object that handles connections from engines and clients,
4 and monitors traffic through the various queues.
4 and monitors traffic through the various queues.
5 """
5 """
6
6
7 # Copyright (c) IPython Development Team.
7 # Copyright (c) IPython Development Team.
8 # Distributed under the terms of the Modified BSD License.
8 # Distributed under the terms of the Modified BSD License.
9
9
10 from __future__ import print_function
10 from __future__ import print_function
11
11
12 import json
12 import json
13 import os
13 import os
14 import sys
14 import sys
15 import time
15 import time
16 from datetime import datetime
16 from datetime import datetime
17
17
18 import zmq
18 import zmq
19 from zmq.eventloop import ioloop
19 from zmq.eventloop import ioloop
20 from zmq.eventloop.zmqstream import ZMQStream
20 from zmq.eventloop.zmqstream import ZMQStream
21
21
22 # internal:
22 # internal:
23 from IPython.utils.importstring import import_item
23 from IPython.utils.importstring import import_item
24 from IPython.utils.jsonutil import extract_dates
24 from IPython.utils.jsonutil import extract_dates
25 from IPython.utils.localinterfaces import localhost
25 from IPython.utils.localinterfaces import localhost
26 from IPython.utils.py3compat import cast_bytes, unicode_type, iteritems
26 from IPython.utils.py3compat import cast_bytes, unicode_type, iteritems
27 from IPython.utils.traitlets import (
27 from IPython.utils.traitlets import (
28 HasTraits, Instance, Integer, Unicode, Dict, Set, Tuple, CBytes, DottedObjectName
28 HasTraits, Instance, Integer, Unicode, Dict, Set, Tuple, CBytes, DottedObjectName
29 )
29 )
30
30
31 from IPython.parallel import error, util
31 from IPython.parallel import error, util
32 from IPython.parallel.factory import RegistrationFactory
32 from IPython.parallel.factory import RegistrationFactory
33
33
34 from IPython.kernel.zmq.session import SessionFactory
34 from IPython.kernel.zmq.session import SessionFactory
35
35
36 from .heartmonitor import HeartMonitor
36 from .heartmonitor import HeartMonitor
37
37
38 #-----------------------------------------------------------------------------
38 #-----------------------------------------------------------------------------
39 # Code
39 # Code
40 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
41
41
42 def _passer(*args, **kwargs):
42 def _passer(*args, **kwargs):
43 return
43 return
44
44
45 def _printer(*args, **kwargs):
45 def _printer(*args, **kwargs):
46 print(args)
46 print(args)
47 print(kwargs)
47 print(kwargs)
48
48
49 def empty_record():
49 def empty_record():
50 """Return an empty dict with all record keys."""
50 """Return an empty dict with all record keys."""
51 return {
51 return {
52 'msg_id' : None,
52 'msg_id' : None,
53 'header' : None,
53 'header' : None,
54 'metadata' : None,
54 'metadata' : None,
55 'content': None,
55 'content': None,
56 'buffers': None,
56 'buffers': None,
57 'submitted': None,
57 'submitted': None,
58 'client_uuid' : None,
58 'client_uuid' : None,
59 'engine_uuid' : None,
59 'engine_uuid' : None,
60 'started': None,
60 'started': None,
61 'completed': None,
61 'completed': None,
62 'resubmitted': None,
62 'resubmitted': None,
63 'received': None,
63 'received': None,
64 'result_header' : None,
64 'result_header' : None,
65 'result_metadata' : None,
65 'result_metadata' : None,
66 'result_content' : None,
66 'result_content' : None,
67 'result_buffers' : None,
67 'result_buffers' : None,
68 'queue' : None,
68 'queue' : None,
69 'execute_input' : None,
69 'execute_input' : None,
70 'execute_result': None,
70 'execute_result': None,
71 'pyerr': None,
71 'error': None,
72 'stdout': '',
72 'stdout': '',
73 'stderr': '',
73 'stderr': '',
74 }
74 }
75
75
76 def init_record(msg):
76 def init_record(msg):
77 """Initialize a TaskRecord based on a request."""
77 """Initialize a TaskRecord based on a request."""
78 header = msg['header']
78 header = msg['header']
79 return {
79 return {
80 'msg_id' : header['msg_id'],
80 'msg_id' : header['msg_id'],
81 'header' : header,
81 'header' : header,
82 'content': msg['content'],
82 'content': msg['content'],
83 'metadata': msg['metadata'],
83 'metadata': msg['metadata'],
84 'buffers': msg['buffers'],
84 'buffers': msg['buffers'],
85 'submitted': header['date'],
85 'submitted': header['date'],
86 'client_uuid' : None,
86 'client_uuid' : None,
87 'engine_uuid' : None,
87 'engine_uuid' : None,
88 'started': None,
88 'started': None,
89 'completed': None,
89 'completed': None,
90 'resubmitted': None,
90 'resubmitted': None,
91 'received': None,
91 'received': None,
92 'result_header' : None,
92 'result_header' : None,
93 'result_metadata': None,
93 'result_metadata': None,
94 'result_content' : None,
94 'result_content' : None,
95 'result_buffers' : None,
95 'result_buffers' : None,
96 'queue' : None,
96 'queue' : None,
97 'execute_input' : None,
97 'execute_input' : None,
98 'execute_result': None,
98 'execute_result': None,
99 'pyerr': None,
99 'error': None,
100 'stdout': '',
100 'stdout': '',
101 'stderr': '',
101 'stderr': '',
102 }
102 }
103
103
104
104
105 class EngineConnector(HasTraits):
105 class EngineConnector(HasTraits):
106 """A simple object for accessing the various zmq connections of an object.
106 """A simple object for accessing the various zmq connections of an object.
107 Attributes are:
107 Attributes are:
108 id (int): engine ID
108 id (int): engine ID
109 uuid (unicode): engine UUID
109 uuid (unicode): engine UUID
110 pending: set of msg_ids
110 pending: set of msg_ids
111 stallback: DelayedCallback for stalled registration
111 stallback: DelayedCallback for stalled registration
112 """
112 """
113
113
114 id = Integer(0)
114 id = Integer(0)
115 uuid = Unicode()
115 uuid = Unicode()
116 pending = Set()
116 pending = Set()
117 stallback = Instance(ioloop.DelayedCallback)
117 stallback = Instance(ioloop.DelayedCallback)
118
118
119
119
120 _db_shortcuts = {
120 _db_shortcuts = {
121 'sqlitedb' : 'IPython.parallel.controller.sqlitedb.SQLiteDB',
121 'sqlitedb' : 'IPython.parallel.controller.sqlitedb.SQLiteDB',
122 'mongodb' : 'IPython.parallel.controller.mongodb.MongoDB',
122 'mongodb' : 'IPython.parallel.controller.mongodb.MongoDB',
123 'dictdb' : 'IPython.parallel.controller.dictdb.DictDB',
123 'dictdb' : 'IPython.parallel.controller.dictdb.DictDB',
124 'nodb' : 'IPython.parallel.controller.dictdb.NoDB',
124 'nodb' : 'IPython.parallel.controller.dictdb.NoDB',
125 }
125 }
126
126
127 class HubFactory(RegistrationFactory):
127 class HubFactory(RegistrationFactory):
128 """The Configurable for setting up a Hub."""
128 """The Configurable for setting up a Hub."""
129
129
130 # port-pairs for monitoredqueues:
130 # port-pairs for monitoredqueues:
131 hb = Tuple(Integer,Integer,config=True,
131 hb = Tuple(Integer,Integer,config=True,
132 help="""PUB/ROUTER Port pair for Engine heartbeats""")
132 help="""PUB/ROUTER Port pair for Engine heartbeats""")
133 def _hb_default(self):
133 def _hb_default(self):
134 return tuple(util.select_random_ports(2))
134 return tuple(util.select_random_ports(2))
135
135
136 mux = Tuple(Integer,Integer,config=True,
136 mux = Tuple(Integer,Integer,config=True,
137 help="""Client/Engine Port pair for MUX queue""")
137 help="""Client/Engine Port pair for MUX queue""")
138
138
139 def _mux_default(self):
139 def _mux_default(self):
140 return tuple(util.select_random_ports(2))
140 return tuple(util.select_random_ports(2))
141
141
142 task = Tuple(Integer,Integer,config=True,
142 task = Tuple(Integer,Integer,config=True,
143 help="""Client/Engine Port pair for Task queue""")
143 help="""Client/Engine Port pair for Task queue""")
144 def _task_default(self):
144 def _task_default(self):
145 return tuple(util.select_random_ports(2))
145 return tuple(util.select_random_ports(2))
146
146
147 control = Tuple(Integer,Integer,config=True,
147 control = Tuple(Integer,Integer,config=True,
148 help="""Client/Engine Port pair for Control queue""")
148 help="""Client/Engine Port pair for Control queue""")
149
149
150 def _control_default(self):
150 def _control_default(self):
151 return tuple(util.select_random_ports(2))
151 return tuple(util.select_random_ports(2))
152
152
153 iopub = Tuple(Integer,Integer,config=True,
153 iopub = Tuple(Integer,Integer,config=True,
154 help="""Client/Engine Port pair for IOPub relay""")
154 help="""Client/Engine Port pair for IOPub relay""")
155
155
156 def _iopub_default(self):
156 def _iopub_default(self):
157 return tuple(util.select_random_ports(2))
157 return tuple(util.select_random_ports(2))
158
158
159 # single ports:
159 # single ports:
160 mon_port = Integer(config=True,
160 mon_port = Integer(config=True,
161 help="""Monitor (SUB) port for queue traffic""")
161 help="""Monitor (SUB) port for queue traffic""")
162
162
163 def _mon_port_default(self):
163 def _mon_port_default(self):
164 return util.select_random_ports(1)[0]
164 return util.select_random_ports(1)[0]
165
165
166 notifier_port = Integer(config=True,
166 notifier_port = Integer(config=True,
167 help="""PUB port for sending engine status notifications""")
167 help="""PUB port for sending engine status notifications""")
168
168
169 def _notifier_port_default(self):
169 def _notifier_port_default(self):
170 return util.select_random_ports(1)[0]
170 return util.select_random_ports(1)[0]
171
171
172 engine_ip = Unicode(config=True,
172 engine_ip = Unicode(config=True,
173 help="IP on which to listen for engine connections. [default: loopback]")
173 help="IP on which to listen for engine connections. [default: loopback]")
174 def _engine_ip_default(self):
174 def _engine_ip_default(self):
175 return localhost()
175 return localhost()
176 engine_transport = Unicode('tcp', config=True,
176 engine_transport = Unicode('tcp', config=True,
177 help="0MQ transport for engine connections. [default: tcp]")
177 help="0MQ transport for engine connections. [default: tcp]")
178
178
179 client_ip = Unicode(config=True,
179 client_ip = Unicode(config=True,
180 help="IP on which to listen for client connections. [default: loopback]")
180 help="IP on which to listen for client connections. [default: loopback]")
181 client_transport = Unicode('tcp', config=True,
181 client_transport = Unicode('tcp', config=True,
182 help="0MQ transport for client connections. [default : tcp]")
182 help="0MQ transport for client connections. [default : tcp]")
183
183
184 monitor_ip = Unicode(config=True,
184 monitor_ip = Unicode(config=True,
185 help="IP on which to listen for monitor messages. [default: loopback]")
185 help="IP on which to listen for monitor messages. [default: loopback]")
186 monitor_transport = Unicode('tcp', config=True,
186 monitor_transport = Unicode('tcp', config=True,
187 help="0MQ transport for monitor messages. [default : tcp]")
187 help="0MQ transport for monitor messages. [default : tcp]")
188
188
189 _client_ip_default = _monitor_ip_default = _engine_ip_default
189 _client_ip_default = _monitor_ip_default = _engine_ip_default
190
190
191
191
192 monitor_url = Unicode('')
192 monitor_url = Unicode('')
193
193
194 db_class = DottedObjectName('NoDB',
194 db_class = DottedObjectName('NoDB',
195 config=True, help="""The class to use for the DB backend
195 config=True, help="""The class to use for the DB backend
196
196
197 Options include:
197 Options include:
198
198
199 SQLiteDB: SQLite
199 SQLiteDB: SQLite
200 MongoDB : use MongoDB
200 MongoDB : use MongoDB
201 DictDB : in-memory storage (fastest, but be mindful of memory growth of the Hub)
201 DictDB : in-memory storage (fastest, but be mindful of memory growth of the Hub)
202 NoDB : disable database altogether (default)
202 NoDB : disable database altogether (default)
203
203
204 """)
204 """)
205
205
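# --- Illustrative configuration sketch (editor's addition, not part of the diff above) ---
# Selecting the Hub's db backend. The lowercase shortcut names map through
# `_db_shortcuts` above; file and flag names follow the standard IPython config
# conventions of this era (assumption).
#
#   # in ipcontroller_config.py:
#   c.HubFactory.db_class = 'sqlitedb'     # or 'mongodb', 'dictdb', 'nodb'
#
#   # or on the command line:
#   ipcontroller --HubFactory.db_class=sqlitedb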
206 registration_timeout = Integer(0, config=True,
206 registration_timeout = Integer(0, config=True,
207 help="Engine registration timeout in seconds [default: max(30,"
207 help="Engine registration timeout in seconds [default: max(30,"
208 "10*heartmonitor.period)]" )
208 "10*heartmonitor.period)]" )
209
209
210 def _registration_timeout_default(self):
210 def _registration_timeout_default(self):
211 if self.heartmonitor is None:
211 if self.heartmonitor is None:
212 # early initialization, this value will be ignored
212 # early initialization, this value will be ignored
213 return 0
213 return 0
214 # heartmonitor period is in milliseconds; 10 periods expressed in seconds is .01 * period
214 # heartmonitor period is in milliseconds; 10 periods expressed in seconds is .01 * period
215 return max(30, int(.01 * self.heartmonitor.period))
215 return max(30, int(.01 * self.heartmonitor.period))
216
216
217 # not configurable
217 # not configurable
218 db = Instance('IPython.parallel.controller.dictdb.BaseDB')
218 db = Instance('IPython.parallel.controller.dictdb.BaseDB')
219 heartmonitor = Instance('IPython.parallel.controller.heartmonitor.HeartMonitor')
219 heartmonitor = Instance('IPython.parallel.controller.heartmonitor.HeartMonitor')
220
220
221 def _ip_changed(self, name, old, new):
221 def _ip_changed(self, name, old, new):
222 self.engine_ip = new
222 self.engine_ip = new
223 self.client_ip = new
223 self.client_ip = new
224 self.monitor_ip = new
224 self.monitor_ip = new
225 self._update_monitor_url()
225 self._update_monitor_url()
226
226
227 def _update_monitor_url(self):
227 def _update_monitor_url(self):
228 self.monitor_url = "%s://%s:%i" % (self.monitor_transport, self.monitor_ip, self.mon_port)
228 self.monitor_url = "%s://%s:%i" % (self.monitor_transport, self.monitor_ip, self.mon_port)
229
229
230 def _transport_changed(self, name, old, new):
230 def _transport_changed(self, name, old, new):
231 self.engine_transport = new
231 self.engine_transport = new
232 self.client_transport = new
232 self.client_transport = new
233 self.monitor_transport = new
233 self.monitor_transport = new
234 self._update_monitor_url()
234 self._update_monitor_url()
235
235
236 def __init__(self, **kwargs):
236 def __init__(self, **kwargs):
237 super(HubFactory, self).__init__(**kwargs)
237 super(HubFactory, self).__init__(**kwargs)
238 self._update_monitor_url()
238 self._update_monitor_url()
239
239
240
240
241 def construct(self):
241 def construct(self):
242 self.init_hub()
242 self.init_hub()
243
243
244 def start(self):
244 def start(self):
245 self.heartmonitor.start()
245 self.heartmonitor.start()
246 self.log.info("Heartmonitor started")
246 self.log.info("Heartmonitor started")
247
247
248 def client_url(self, channel):
248 def client_url(self, channel):
249 """return full zmq url for a named client channel"""
249 """return full zmq url for a named client channel"""
250 return "%s://%s:%i" % (self.client_transport, self.client_ip, self.client_info[channel])
250 return "%s://%s:%i" % (self.client_transport, self.client_ip, self.client_info[channel])
251
251
252 def engine_url(self, channel):
252 def engine_url(self, channel):
253 """return full zmq url for a named engine channel"""
253 """return full zmq url for a named engine channel"""
254 return "%s://%s:%i" % (self.engine_transport, self.engine_ip, self.engine_info[channel])
254 return "%s://%s:%i" % (self.engine_transport, self.engine_ip, self.engine_info[channel])
255
255
256 def init_hub(self):
256 def init_hub(self):
257 """construct Hub object"""
257 """construct Hub object"""
258
258
259 ctx = self.context
259 ctx = self.context
260 loop = self.loop
260 loop = self.loop
261 if 'TaskScheduler.scheme_name' in self.config:
261 if 'TaskScheduler.scheme_name' in self.config:
262 scheme = self.config.TaskScheduler.scheme_name
262 scheme = self.config.TaskScheduler.scheme_name
263 else:
263 else:
264 from .scheduler import TaskScheduler
264 from .scheduler import TaskScheduler
265 scheme = TaskScheduler.scheme_name.get_default_value()
265 scheme = TaskScheduler.scheme_name.get_default_value()
266
266
267 # build connection dicts
267 # build connection dicts
268 engine = self.engine_info = {
268 engine = self.engine_info = {
269 'interface' : "%s://%s" % (self.engine_transport, self.engine_ip),
269 'interface' : "%s://%s" % (self.engine_transport, self.engine_ip),
270 'registration' : self.regport,
270 'registration' : self.regport,
271 'control' : self.control[1],
271 'control' : self.control[1],
272 'mux' : self.mux[1],
272 'mux' : self.mux[1],
273 'hb_ping' : self.hb[0],
273 'hb_ping' : self.hb[0],
274 'hb_pong' : self.hb[1],
274 'hb_pong' : self.hb[1],
275 'task' : self.task[1],
275 'task' : self.task[1],
276 'iopub' : self.iopub[1],
276 'iopub' : self.iopub[1],
277 }
277 }
278
278
279 client = self.client_info = {
279 client = self.client_info = {
280 'interface' : "%s://%s" % (self.client_transport, self.client_ip),
280 'interface' : "%s://%s" % (self.client_transport, self.client_ip),
281 'registration' : self.regport,
281 'registration' : self.regport,
282 'control' : self.control[0],
282 'control' : self.control[0],
283 'mux' : self.mux[0],
283 'mux' : self.mux[0],
284 'task' : self.task[0],
284 'task' : self.task[0],
285 'task_scheme' : scheme,
285 'task_scheme' : scheme,
286 'iopub' : self.iopub[0],
286 'iopub' : self.iopub[0],
287 'notification' : self.notifier_port,
287 'notification' : self.notifier_port,
288 }
288 }
289
289
290 self.log.debug("Hub engine addrs: %s", self.engine_info)
290 self.log.debug("Hub engine addrs: %s", self.engine_info)
291 self.log.debug("Hub client addrs: %s", self.client_info)
291 self.log.debug("Hub client addrs: %s", self.client_info)
292
292
293 # Registrar socket
293 # Registrar socket
294 q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
294 q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
295 util.set_hwm(q, 0)
295 util.set_hwm(q, 0)
296 q.bind(self.client_url('registration'))
296 q.bind(self.client_url('registration'))
297 self.log.info("Hub listening on %s for registration.", self.client_url('registration'))
297 self.log.info("Hub listening on %s for registration.", self.client_url('registration'))
298 if self.client_ip != self.engine_ip:
298 if self.client_ip != self.engine_ip:
299 q.bind(self.engine_url('registration'))
299 q.bind(self.engine_url('registration'))
300 self.log.info("Hub listening on %s for registration.", self.engine_url('registration'))
300 self.log.info("Hub listening on %s for registration.", self.engine_url('registration'))
301
301
302 ### Engine connections ###
302 ### Engine connections ###
303
303
304 # heartbeat
304 # heartbeat
305 hpub = ctx.socket(zmq.PUB)
305 hpub = ctx.socket(zmq.PUB)
306 hpub.bind(self.engine_url('hb_ping'))
306 hpub.bind(self.engine_url('hb_ping'))
307 hrep = ctx.socket(zmq.ROUTER)
307 hrep = ctx.socket(zmq.ROUTER)
308 util.set_hwm(hrep, 0)
308 util.set_hwm(hrep, 0)
309 hrep.bind(self.engine_url('hb_pong'))
309 hrep.bind(self.engine_url('hb_pong'))
310 self.heartmonitor = HeartMonitor(loop=loop, parent=self, log=self.log,
310 self.heartmonitor = HeartMonitor(loop=loop, parent=self, log=self.log,
311 pingstream=ZMQStream(hpub,loop),
311 pingstream=ZMQStream(hpub,loop),
312 pongstream=ZMQStream(hrep,loop)
312 pongstream=ZMQStream(hrep,loop)
313 )
313 )
314
314
315 ### Client connections ###
315 ### Client connections ###
316
316
317 # Notifier socket
317 # Notifier socket
318 n = ZMQStream(ctx.socket(zmq.PUB), loop)
318 n = ZMQStream(ctx.socket(zmq.PUB), loop)
319 n.bind(self.client_url('notification'))
319 n.bind(self.client_url('notification'))
320
320
321 ### build and launch the queues ###
321 ### build and launch the queues ###
322
322
323 # monitor socket
323 # monitor socket
324 sub = ctx.socket(zmq.SUB)
324 sub = ctx.socket(zmq.SUB)
325 sub.setsockopt(zmq.SUBSCRIBE, b"")
325 sub.setsockopt(zmq.SUBSCRIBE, b"")
326 sub.bind(self.monitor_url)
326 sub.bind(self.monitor_url)
327 sub.bind('inproc://monitor')
327 sub.bind('inproc://monitor')
328 sub = ZMQStream(sub, loop)
328 sub = ZMQStream(sub, loop)
329
329
330 # connect the db
330 # connect the db
331 db_class = _db_shortcuts.get(self.db_class.lower(), self.db_class)
331 db_class = _db_shortcuts.get(self.db_class.lower(), self.db_class)
332 self.log.info('Hub using DB backend: %r', (db_class.split('.')[-1]))
332 self.log.info('Hub using DB backend: %r', (db_class.split('.')[-1]))
333 self.db = import_item(str(db_class))(session=self.session.session,
333 self.db = import_item(str(db_class))(session=self.session.session,
334 parent=self, log=self.log)
334 parent=self, log=self.log)
335 time.sleep(.25)
335 time.sleep(.25)
336
336
337 # resubmit stream
337 # resubmit stream
338 r = ZMQStream(ctx.socket(zmq.DEALER), loop)
338 r = ZMQStream(ctx.socket(zmq.DEALER), loop)
339 url = util.disambiguate_url(self.client_url('task'))
339 url = util.disambiguate_url(self.client_url('task'))
340 r.connect(url)
340 r.connect(url)
341
341
342 # convert seconds to msec
342 # convert seconds to msec
343 registration_timeout = 1000*self.registration_timeout
343 registration_timeout = 1000*self.registration_timeout
344
344
345 self.hub = Hub(loop=loop, session=self.session, monitor=sub, heartmonitor=self.heartmonitor,
345 self.hub = Hub(loop=loop, session=self.session, monitor=sub, heartmonitor=self.heartmonitor,
346 query=q, notifier=n, resubmit=r, db=self.db,
346 query=q, notifier=n, resubmit=r, db=self.db,
347 engine_info=self.engine_info, client_info=self.client_info,
347 engine_info=self.engine_info, client_info=self.client_info,
348 log=self.log, registration_timeout=registration_timeout)
348 log=self.log, registration_timeout=registration_timeout)
349
349
350
350
351 class Hub(SessionFactory):
351 class Hub(SessionFactory):
352 """The IPython Controller Hub with 0MQ connections
352 """The IPython Controller Hub with 0MQ connections
353
353
354 Parameters
354 Parameters
355 ----------
355 ----------
356 loop: zmq IOLoop instance
356 loop: zmq IOLoop instance
357 session: Session object
357 session: Session object
358 <removed> context: zmq context for creating new connections (?)
358 <removed> context: zmq context for creating new connections (?)
359 queue: ZMQStream for monitoring the command queue (SUB)
359 queue: ZMQStream for monitoring the command queue (SUB)
360 query: ZMQStream for engine registration and client queries requests (ROUTER)
360 query: ZMQStream for engine registration and client queries requests (ROUTER)
361 heartbeat: HeartMonitor object checking the pulse of the engines
361 heartbeat: HeartMonitor object checking the pulse of the engines
362 notifier: ZMQStream for broadcasting engine registration changes (PUB)
362 notifier: ZMQStream for broadcasting engine registration changes (PUB)
363 db: connection to db for out of memory logging of commands
363 db: connection to db for out of memory logging of commands
364 NotImplemented
364 NotImplemented
365 engine_info: dict of zmq connection information for engines to connect
365 engine_info: dict of zmq connection information for engines to connect
366 to the queues.
366 to the queues.
367 client_info: dict of zmq connection information for engines to connect
367 client_info: dict of zmq connection information for clients to connect
367 client_info: dict of zmq connection information for clients to connect
368 to the queues.
369 """
369 """
370
370
371 engine_state_file = Unicode()
371 engine_state_file = Unicode()
372
372
373 # internal data structures:
373 # internal data structures:
374 ids=Set() # engine IDs
374 ids=Set() # engine IDs
375 keytable=Dict()
375 keytable=Dict()
376 by_ident=Dict()
376 by_ident=Dict()
377 engines=Dict()
377 engines=Dict()
378 clients=Dict()
378 clients=Dict()
379 hearts=Dict()
379 hearts=Dict()
380 pending=Set()
380 pending=Set()
381 queues=Dict() # pending msg_ids keyed by engine_id
381 queues=Dict() # pending msg_ids keyed by engine_id
382 tasks=Dict() # pending msg_ids submitted as tasks, keyed by client_id
382 tasks=Dict() # pending msg_ids submitted as tasks, keyed by client_id
383 completed=Dict() # completed msg_ids keyed by engine_id
383 completed=Dict() # completed msg_ids keyed by engine_id
384 all_completed=Set() # set of all completed msg_ids
384 all_completed=Set() # set of all completed msg_ids
385 dead_engines=Set() # set of uuids of engines that have died
385 dead_engines=Set() # set of uuids of engines that have died
386 unassigned=Set() # set of task msg_ids not yet assigned a destination
386 unassigned=Set() # set of task msg_ids not yet assigned a destination
387 incoming_registrations=Dict()
387 incoming_registrations=Dict()
388 registration_timeout=Integer()
388 registration_timeout=Integer()
389 _idcounter=Integer(0)
389 _idcounter=Integer(0)
390
390
391 # objects from constructor:
391 # objects from constructor:
392 query=Instance(ZMQStream)
392 query=Instance(ZMQStream)
393 monitor=Instance(ZMQStream)
393 monitor=Instance(ZMQStream)
394 notifier=Instance(ZMQStream)
394 notifier=Instance(ZMQStream)
395 resubmit=Instance(ZMQStream)
395 resubmit=Instance(ZMQStream)
396 heartmonitor=Instance(HeartMonitor)
396 heartmonitor=Instance(HeartMonitor)
397 db=Instance(object)
397 db=Instance(object)
398 client_info=Dict()
398 client_info=Dict()
399 engine_info=Dict()
399 engine_info=Dict()
400
400
401
401
402 def __init__(self, **kwargs):
402 def __init__(self, **kwargs):
403 """
403 """
404 # universal:
404 # universal:
405 loop: IOLoop for creating future connections
405 loop: IOLoop for creating future connections
406 session: streamsession for sending serialized data
406 session: streamsession for sending serialized data
407 # engine:
407 # engine:
408 queue: ZMQStream for monitoring queue messages
408 queue: ZMQStream for monitoring queue messages
409 query: ZMQStream for engine+client registration and client requests
409 query: ZMQStream for engine+client registration and client requests
410 heartbeat: HeartMonitor object for tracking engines
410 heartbeat: HeartMonitor object for tracking engines
411 # extra:
411 # extra:
412 db: ZMQStream for db connection (NotImplemented)
412 db: ZMQStream for db connection (NotImplemented)
413 engine_info: zmq address/protocol dict for engine connections
413 engine_info: zmq address/protocol dict for engine connections
414 client_info: zmq address/protocol dict for client connections
414 client_info: zmq address/protocol dict for client connections
415 """
415 """
416
416
417 super(Hub, self).__init__(**kwargs)
417 super(Hub, self).__init__(**kwargs)
418
418
419 # register our callbacks
419 # register our callbacks
420 self.query.on_recv(self.dispatch_query)
420 self.query.on_recv(self.dispatch_query)
421 self.monitor.on_recv(self.dispatch_monitor_traffic)
421 self.monitor.on_recv(self.dispatch_monitor_traffic)
422
422
423 self.heartmonitor.add_heart_failure_handler(self.handle_heart_failure)
423 self.heartmonitor.add_heart_failure_handler(self.handle_heart_failure)
424 self.heartmonitor.add_new_heart_handler(self.handle_new_heart)
424 self.heartmonitor.add_new_heart_handler(self.handle_new_heart)
425
425
426 self.monitor_handlers = {b'in' : self.save_queue_request,
426 self.monitor_handlers = {b'in' : self.save_queue_request,
427 b'out': self.save_queue_result,
427 b'out': self.save_queue_result,
428 b'intask': self.save_task_request,
428 b'intask': self.save_task_request,
429 b'outtask': self.save_task_result,
429 b'outtask': self.save_task_result,
430 b'tracktask': self.save_task_destination,
430 b'tracktask': self.save_task_destination,
431 b'incontrol': _passer,
431 b'incontrol': _passer,
432 b'outcontrol': _passer,
432 b'outcontrol': _passer,
433 b'iopub': self.save_iopub_message,
433 b'iopub': self.save_iopub_message,
434 }
434 }
435
435
436 self.query_handlers = {'queue_request': self.queue_status,
436 self.query_handlers = {'queue_request': self.queue_status,
437 'result_request': self.get_results,
437 'result_request': self.get_results,
438 'history_request': self.get_history,
438 'history_request': self.get_history,
439 'db_request': self.db_query,
439 'db_request': self.db_query,
440 'purge_request': self.purge_results,
440 'purge_request': self.purge_results,
441 'load_request': self.check_load,
441 'load_request': self.check_load,
442 'resubmit_request': self.resubmit_task,
442 'resubmit_request': self.resubmit_task,
443 'shutdown_request': self.shutdown_request,
443 'shutdown_request': self.shutdown_request,
444 'registration_request' : self.register_engine,
444 'registration_request' : self.register_engine,
445 'unregistration_request' : self.unregister_engine,
445 'unregistration_request' : self.unregister_engine,
446 'connection_request': self.connection_request,
446 'connection_request': self.connection_request,
447 }
447 }
448
448
449 # ignore resubmit replies
449 # ignore resubmit replies
450 self.resubmit.on_recv(lambda msg: None, copy=False)
450 self.resubmit.on_recv(lambda msg: None, copy=False)
451
451
452 self.log.info("hub::created hub")
452 self.log.info("hub::created hub")
453
453
454 @property
454 @property
455 def _next_id(self):
455 def _next_id(self):
456 """gemerate a new ID.
456 """gemerate a new ID.
457
457
458 No longer reuse old ids, just count from 0."""
458 No longer reuse old ids, just count from 0."""
459 newid = self._idcounter
459 newid = self._idcounter
460 self._idcounter += 1
460 self._idcounter += 1
461 return newid
461 return newid
462 # newid = 0
462 # newid = 0
463 # incoming = [id[0] for id in itervalues(self.incoming_registrations)]
463 # incoming = [id[0] for id in itervalues(self.incoming_registrations)]
464 # # print newid, self.ids, self.incoming_registrations
464 # # print newid, self.ids, self.incoming_registrations
465 # while newid in self.ids or newid in incoming:
465 # while newid in self.ids or newid in incoming:
466 # newid += 1
466 # newid += 1
467 # return newid
467 # return newid
468
468
469 #-----------------------------------------------------------------------------
469 #-----------------------------------------------------------------------------
470 # message validation
470 # message validation
471 #-----------------------------------------------------------------------------
471 #-----------------------------------------------------------------------------
472
472
473 def _validate_targets(self, targets):
473 def _validate_targets(self, targets):
474 """turn any valid targets argument into a list of integer ids"""
474 """turn any valid targets argument into a list of integer ids"""
475 if targets is None:
475 if targets is None:
476 # default to all
476 # default to all
477 return self.ids
477 return self.ids
478
478
479 if isinstance(targets, (int,str,unicode_type)):
479 if isinstance(targets, (int,str,unicode_type)):
480 # only one target specified
480 # only one target specified
481 targets = [targets]
481 targets = [targets]
482 _targets = []
482 _targets = []
483 for t in targets:
483 for t in targets:
484 # map raw identities to ids
484 # map raw identities to ids
485 if isinstance(t, (str,unicode_type)):
485 if isinstance(t, (str,unicode_type)):
486 t = self.by_ident.get(cast_bytes(t), t)
486 t = self.by_ident.get(cast_bytes(t), t)
487 _targets.append(t)
487 _targets.append(t)
488 targets = _targets
488 targets = _targets
489 bad_targets = [ t for t in targets if t not in self.ids ]
489 bad_targets = [ t for t in targets if t not in self.ids ]
490 if bad_targets:
490 if bad_targets:
491 raise IndexError("No Such Engine: %r" % bad_targets)
491 raise IndexError("No Such Engine: %r" % bad_targets)
492 if not targets:
492 if not targets:
493 raise IndexError("No Engines Registered")
493 raise IndexError("No Engines Registered")
494 return targets
494 return targets
495
495
496 #-----------------------------------------------------------------------------
496 #-----------------------------------------------------------------------------
497 # dispatch methods (1 per stream)
497 # dispatch methods (1 per stream)
498 #-----------------------------------------------------------------------------
498 #-----------------------------------------------------------------------------
499
499
500
500
501 @util.log_errors
501 @util.log_errors
502 def dispatch_monitor_traffic(self, msg):
502 def dispatch_monitor_traffic(self, msg):
503 """all ME and Task queue messages come through here, as well as
503 """all ME and Task queue messages come through here, as well as
504 IOPub traffic."""
504 IOPub traffic."""
505 self.log.debug("monitor traffic: %r", msg[0])
505 self.log.debug("monitor traffic: %r", msg[0])
506 switch = msg[0]
506 switch = msg[0]
507 try:
507 try:
508 idents, msg = self.session.feed_identities(msg[1:])
508 idents, msg = self.session.feed_identities(msg[1:])
509 except ValueError:
509 except ValueError:
510 idents=[]
510 idents=[]
511 if not idents:
511 if not idents:
512 self.log.error("Monitor message without topic: %r", msg)
512 self.log.error("Monitor message without topic: %r", msg)
513 return
513 return
514 handler = self.monitor_handlers.get(switch, None)
514 handler = self.monitor_handlers.get(switch, None)
515 if handler is not None:
515 if handler is not None:
516 handler(idents, msg)
516 handler(idents, msg)
517 else:
517 else:
518 self.log.error("Unrecognized monitor topic: %r", switch)
518 self.log.error("Unrecognized monitor topic: %r", switch)
519
519
520
520
521 @util.log_errors
521 @util.log_errors
522 def dispatch_query(self, msg):
522 def dispatch_query(self, msg):
523 """Route registration requests and queries from clients."""
523 """Route registration requests and queries from clients."""
524 try:
524 try:
525 idents, msg = self.session.feed_identities(msg)
525 idents, msg = self.session.feed_identities(msg)
526 except ValueError:
526 except ValueError:
527 idents = []
527 idents = []
528 if not idents:
528 if not idents:
529 self.log.error("Bad Query Message: %r", msg)
529 self.log.error("Bad Query Message: %r", msg)
530 return
530 return
531 client_id = idents[0]
531 client_id = idents[0]
532 try:
532 try:
533 msg = self.session.unserialize(msg, content=True)
533 msg = self.session.unserialize(msg, content=True)
534 except Exception:
534 except Exception:
535 content = error.wrap_exception()
535 content = error.wrap_exception()
536 self.log.error("Bad Query Message: %r", msg, exc_info=True)
536 self.log.error("Bad Query Message: %r", msg, exc_info=True)
537 self.session.send(self.query, "hub_error", ident=client_id,
537 self.session.send(self.query, "hub_error", ident=client_id,
538 content=content)
538 content=content)
539 return
539 return
540 # print client_id, header, parent, content
540 # print client_id, header, parent, content
541 #switch on message type:
541 #switch on message type:
542 msg_type = msg['header']['msg_type']
542 msg_type = msg['header']['msg_type']
543 self.log.info("client::client %r requested %r", client_id, msg_type)
543 self.log.info("client::client %r requested %r", client_id, msg_type)
544 handler = self.query_handlers.get(msg_type, None)
544 handler = self.query_handlers.get(msg_type, None)
545 try:
545 try:
546 assert handler is not None, "Bad Message Type: %r" % msg_type
546 assert handler is not None, "Bad Message Type: %r" % msg_type
547 except:
547 except:
548 content = error.wrap_exception()
548 content = error.wrap_exception()
549 self.log.error("Bad Message Type: %r", msg_type, exc_info=True)
549 self.log.error("Bad Message Type: %r", msg_type, exc_info=True)
550 self.session.send(self.query, "hub_error", ident=client_id,
550 self.session.send(self.query, "hub_error", ident=client_id,
551 content=content)
551 content=content)
552 return
552 return
553
553
554 else:
554 else:
555 handler(idents, msg)
555 handler(idents, msg)
556
556
557 def dispatch_db(self, msg):
557 def dispatch_db(self, msg):
558 """"""
558 """"""
559 raise NotImplementedError
559 raise NotImplementedError
560
560
561 #---------------------------------------------------------------------------
561 #---------------------------------------------------------------------------
562 # handler methods (1 per event)
562 # handler methods (1 per event)
563 #---------------------------------------------------------------------------
563 #---------------------------------------------------------------------------
564
564
565 #----------------------- Heartbeat --------------------------------------
565 #----------------------- Heartbeat --------------------------------------
566
566
567 def handle_new_heart(self, heart):
567 def handle_new_heart(self, heart):
568 """handler to attach to heartbeater.
568 """handler to attach to heartbeater.
569 Called when a new heart starts to beat.
569 Called when a new heart starts to beat.
570 Triggers completion of registration."""
570 Triggers completion of registration."""
571 self.log.debug("heartbeat::handle_new_heart(%r)", heart)
571 self.log.debug("heartbeat::handle_new_heart(%r)", heart)
572 if heart not in self.incoming_registrations:
572 if heart not in self.incoming_registrations:
573 self.log.info("heartbeat::ignoring new heart: %r", heart)
573 self.log.info("heartbeat::ignoring new heart: %r", heart)
574 else:
574 else:
575 self.finish_registration(heart)
575 self.finish_registration(heart)
576
576
577
577
578 def handle_heart_failure(self, heart):
578 def handle_heart_failure(self, heart):
579 """handler to attach to heartbeater.
579 """handler to attach to heartbeater.
580 called when a previously registered heart fails to respond to beat request.
580 called when a previously registered heart fails to respond to beat request.
581 triggers unregistration"""
581 triggers unregistration"""
582 self.log.debug("heartbeat::handle_heart_failure(%r)", heart)
582 self.log.debug("heartbeat::handle_heart_failure(%r)", heart)
583 eid = self.hearts.get(heart, None)
583 eid = self.hearts.get(heart, None)
584 if eid is None or self.keytable[eid] in self.dead_engines:
584 if eid is None or self.keytable[eid] in self.dead_engines:
585 self.log.info("heartbeat::ignoring heart failure %r (not an engine or already dead)", heart)
585 self.log.info("heartbeat::ignoring heart failure %r (not an engine or already dead)", heart)
586 else:
586 else:
587 uuid = self.engines[eid].uuid
587 uuid = self.engines[eid].uuid
588 self.unregister_engine(heart, dict(content=dict(id=eid, queue=uuid)))
588 self.unregister_engine(heart, dict(content=dict(id=eid, queue=uuid)))
589
589
590 #----------------------- MUX Queue Traffic ------------------------------
590 #----------------------- MUX Queue Traffic ------------------------------
591
591
592 def save_queue_request(self, idents, msg):
592 def save_queue_request(self, idents, msg):
593 if len(idents) < 2:
593 if len(idents) < 2:
594 self.log.error("invalid identity prefix: %r", idents)
594 self.log.error("invalid identity prefix: %r", idents)
595 return
595 return
596 queue_id, client_id = idents[:2]
596 queue_id, client_id = idents[:2]
597 try:
597 try:
598 msg = self.session.unserialize(msg)
598 msg = self.session.unserialize(msg)
599 except Exception:
599 except Exception:
600 self.log.error("queue::client %r sent invalid message to %r: %r", client_id, queue_id, msg, exc_info=True)
600 self.log.error("queue::client %r sent invalid message to %r: %r", client_id, queue_id, msg, exc_info=True)
601 return
601 return
602
602
603 eid = self.by_ident.get(queue_id, None)
603 eid = self.by_ident.get(queue_id, None)
604 if eid is None:
604 if eid is None:
605 self.log.error("queue::target %r not registered", queue_id)
605 self.log.error("queue::target %r not registered", queue_id)
606 self.log.debug("queue:: valid are: %r", self.by_ident.keys())
606 self.log.debug("queue:: valid are: %r", self.by_ident.keys())
607 return
607 return
608 record = init_record(msg)
608 record = init_record(msg)
609 msg_id = record['msg_id']
609 msg_id = record['msg_id']
610 self.log.info("queue::client %r submitted request %r to %s", client_id, msg_id, eid)
610 self.log.info("queue::client %r submitted request %r to %s", client_id, msg_id, eid)
611 # Unicode in records
611 # Unicode in records
612 record['engine_uuid'] = queue_id.decode('ascii')
612 record['engine_uuid'] = queue_id.decode('ascii')
613 record['client_uuid'] = msg['header']['session']
613 record['client_uuid'] = msg['header']['session']
614 record['queue'] = 'mux'
614 record['queue'] = 'mux'
615
615
616 try:
616 try:
617 # it's possible iopub arrived first:
617 # it's possible iopub arrived first:
618 existing = self.db.get_record(msg_id)
618 existing = self.db.get_record(msg_id)
619 for key,evalue in iteritems(existing):
619 for key,evalue in iteritems(existing):
620 rvalue = record.get(key, None)
620 rvalue = record.get(key, None)
621 if evalue and rvalue and evalue != rvalue:
621 if evalue and rvalue and evalue != rvalue:
622 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
622 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
623 elif evalue and not rvalue:
623 elif evalue and not rvalue:
624 record[key] = evalue
624 record[key] = evalue
625 try:
625 try:
626 self.db.update_record(msg_id, record)
626 self.db.update_record(msg_id, record)
627 except Exception:
627 except Exception:
628 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
628 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
629 except KeyError:
629 except KeyError:
630 try:
630 try:
631 self.db.add_record(msg_id, record)
631 self.db.add_record(msg_id, record)
632 except Exception:
632 except Exception:
633 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
633 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
634
634
635
635
636 self.pending.add(msg_id)
636 self.pending.add(msg_id)
637 self.queues[eid].append(msg_id)
637 self.queues[eid].append(msg_id)
638
638
639 def save_queue_result(self, idents, msg):
639 def save_queue_result(self, idents, msg):
640 if len(idents) < 2:
640 if len(idents) < 2:
641 self.log.error("invalid identity prefix: %r", idents)
641 self.log.error("invalid identity prefix: %r", idents)
642 return
642 return
643
643
644 client_id, queue_id = idents[:2]
644 client_id, queue_id = idents[:2]
645 try:
645 try:
646 msg = self.session.unserialize(msg)
646 msg = self.session.unserialize(msg)
647 except Exception:
647 except Exception:
648 self.log.error("queue::engine %r sent invalid message to %r: %r",
648 self.log.error("queue::engine %r sent invalid message to %r: %r",
649 queue_id, client_id, msg, exc_info=True)
649 queue_id, client_id, msg, exc_info=True)
650 return
650 return
651
651
652 eid = self.by_ident.get(queue_id, None)
652 eid = self.by_ident.get(queue_id, None)
653 if eid is None:
653 if eid is None:
654 self.log.error("queue::unknown engine %r is sending a reply: ", queue_id)
654 self.log.error("queue::unknown engine %r is sending a reply: ", queue_id)
655 return
655 return
656
656
657 parent = msg['parent_header']
657 parent = msg['parent_header']
658 if not parent:
658 if not parent:
659 return
659 return
660 msg_id = parent['msg_id']
660 msg_id = parent['msg_id']
661 if msg_id in self.pending:
661 if msg_id in self.pending:
662 self.pending.remove(msg_id)
662 self.pending.remove(msg_id)
663 self.all_completed.add(msg_id)
663 self.all_completed.add(msg_id)
664 self.queues[eid].remove(msg_id)
664 self.queues[eid].remove(msg_id)
665 self.completed[eid].append(msg_id)
665 self.completed[eid].append(msg_id)
666 self.log.info("queue::request %r completed on %s", msg_id, eid)
666 self.log.info("queue::request %r completed on %s", msg_id, eid)
667 elif msg_id not in self.all_completed:
667 elif msg_id not in self.all_completed:
668 # it could be a result from a dead engine that died before delivering the
668 # it could be a result from a dead engine that died before delivering the
669 # result
669 # result
670 self.log.warn("queue:: unknown msg finished %r", msg_id)
670 self.log.warn("queue:: unknown msg finished %r", msg_id)
671 return
671 return
672 # update record anyway, because the unregistration could have been premature
672 # update record anyway, because the unregistration could have been premature
673 rheader = msg['header']
673 rheader = msg['header']
674 md = msg['metadata']
674 md = msg['metadata']
675 completed = rheader['date']
675 completed = rheader['date']
676 started = extract_dates(md.get('started', None))
676 started = extract_dates(md.get('started', None))
677 result = {
677 result = {
678 'result_header' : rheader,
678 'result_header' : rheader,
679 'result_metadata': md,
679 'result_metadata': md,
680 'result_content': msg['content'],
680 'result_content': msg['content'],
681 'received': datetime.now(),
681 'received': datetime.now(),
682 'started' : started,
682 'started' : started,
683 'completed' : completed
683 'completed' : completed
684 }
684 }
685
685
686 result['result_buffers'] = msg['buffers']
686 result['result_buffers'] = msg['buffers']
687 try:
687 try:
688 self.db.update_record(msg_id, result)
688 self.db.update_record(msg_id, result)
689 except Exception:
689 except Exception:
690 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
690 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
691
691
692
692
693 #--------------------- Task Queue Traffic ------------------------------
693 #--------------------- Task Queue Traffic ------------------------------
694
694
695 def save_task_request(self, idents, msg):
695 def save_task_request(self, idents, msg):
696 """Save the submission of a task."""
696 """Save the submission of a task."""
697 client_id = idents[0]
697 client_id = idents[0]
698
698
699 try:
699 try:
700 msg = self.session.unserialize(msg)
700 msg = self.session.unserialize(msg)
701 except Exception:
701 except Exception:
702 self.log.error("task::client %r sent invalid task message: %r",
702 self.log.error("task::client %r sent invalid task message: %r",
703 client_id, msg, exc_info=True)
703 client_id, msg, exc_info=True)
704 return
704 return
705 record = init_record(msg)
705 record = init_record(msg)
706
706
707 record['client_uuid'] = msg['header']['session']
707 record['client_uuid'] = msg['header']['session']
708 record['queue'] = 'task'
708 record['queue'] = 'task'
709 header = msg['header']
709 header = msg['header']
710 msg_id = header['msg_id']
710 msg_id = header['msg_id']
711 self.pending.add(msg_id)
711 self.pending.add(msg_id)
712 self.unassigned.add(msg_id)
712 self.unassigned.add(msg_id)
713 try:
713 try:
714 # it's possible iopub arrived first:
714 # it's possible iopub arrived first:
715 existing = self.db.get_record(msg_id)
715 existing = self.db.get_record(msg_id)
716 if existing['resubmitted']:
716 if existing['resubmitted']:
717 for key in ('submitted', 'client_uuid', 'buffers'):
717 for key in ('submitted', 'client_uuid', 'buffers'):
718 # don't clobber these keys on resubmit
718 # don't clobber these keys on resubmit
719 # submitted and client_uuid should be different
719 # submitted and client_uuid should be different
720 # and buffers might be big, and shouldn't have changed
720 # and buffers might be big, and shouldn't have changed
721 record.pop(key)
721 record.pop(key)
722 # still check content and header, which should not change
722 # still check content and header, which should not change
723 # and are not as expensive to compare as buffers
723 # and are not as expensive to compare as buffers
724
724
725 for key,evalue in iteritems(existing):
725 for key,evalue in iteritems(existing):
726 if key.endswith('buffers'):
726 if key.endswith('buffers'):
727 # don't compare buffers
727 # don't compare buffers
728 continue
728 continue
729 rvalue = record.get(key, None)
729 rvalue = record.get(key, None)
730 if evalue and rvalue and evalue != rvalue:
730 if evalue and rvalue and evalue != rvalue:
731 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
731 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
732 elif evalue and not rvalue:
732 elif evalue and not rvalue:
733 record[key] = evalue
733 record[key] = evalue
734 try:
734 try:
735 self.db.update_record(msg_id, record)
735 self.db.update_record(msg_id, record)
736 except Exception:
736 except Exception:
737 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
737 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
738 except KeyError:
738 except KeyError:
739 try:
739 try:
740 self.db.add_record(msg_id, record)
740 self.db.add_record(msg_id, record)
741 except Exception:
741 except Exception:
742 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
742 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
743 except Exception:
743 except Exception:
744 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
744 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
745
745
746 def save_task_result(self, idents, msg):
746 def save_task_result(self, idents, msg):
747 """save the result of a completed task."""
747 """save the result of a completed task."""
748 client_id = idents[0]
748 client_id = idents[0]
749 try:
749 try:
750 msg = self.session.unserialize(msg)
750 msg = self.session.unserialize(msg)
751 except Exception:
751 except Exception:
752 self.log.error("task::invalid task result message send to %r: %r",
752 self.log.error("task::invalid task result message send to %r: %r",
753 client_id, msg, exc_info=True)
753 client_id, msg, exc_info=True)
754 return
754 return
755
755
756 parent = msg['parent_header']
756 parent = msg['parent_header']
757 if not parent:
757 if not parent:
758 # print msg
758 # print msg
759 self.log.warn("Task %r had no parent!", msg)
759 self.log.warn("Task %r had no parent!", msg)
760 return
760 return
761 msg_id = parent['msg_id']
761 msg_id = parent['msg_id']
762 if msg_id in self.unassigned:
762 if msg_id in self.unassigned:
763 self.unassigned.remove(msg_id)
763 self.unassigned.remove(msg_id)
764
764
765 header = msg['header']
765 header = msg['header']
766 md = msg['metadata']
766 md = msg['metadata']
767 engine_uuid = md.get('engine', u'')
767 engine_uuid = md.get('engine', u'')
768 eid = self.by_ident.get(cast_bytes(engine_uuid), None)
768 eid = self.by_ident.get(cast_bytes(engine_uuid), None)
769
769
770 status = md.get('status', None)
770 status = md.get('status', None)
771
771
772 if msg_id in self.pending:
772 if msg_id in self.pending:
773 self.log.info("task::task %r finished on %s", msg_id, eid)
773 self.log.info("task::task %r finished on %s", msg_id, eid)
774 self.pending.remove(msg_id)
774 self.pending.remove(msg_id)
775 self.all_completed.add(msg_id)
775 self.all_completed.add(msg_id)
776 if eid is not None:
776 if eid is not None:
777 if status != 'aborted':
777 if status != 'aborted':
778 self.completed[eid].append(msg_id)
778 self.completed[eid].append(msg_id)
779 if msg_id in self.tasks[eid]:
779 if msg_id in self.tasks[eid]:
780 self.tasks[eid].remove(msg_id)
780 self.tasks[eid].remove(msg_id)
781 completed = header['date']
781 completed = header['date']
782 started = extract_dates(md.get('started', None))
782 started = extract_dates(md.get('started', None))
783 result = {
783 result = {
784 'result_header' : header,
784 'result_header' : header,
785 'result_metadata': msg['metadata'],
785 'result_metadata': msg['metadata'],
786 'result_content': msg['content'],
786 'result_content': msg['content'],
787 'started' : started,
787 'started' : started,
788 'completed' : completed,
788 'completed' : completed,
789 'received' : datetime.now(),
789 'received' : datetime.now(),
790 'engine_uuid': engine_uuid,
790 'engine_uuid': engine_uuid,
791 }
791 }
792
792
793 result['result_buffers'] = msg['buffers']
793 result['result_buffers'] = msg['buffers']
794 try:
794 try:
795 self.db.update_record(msg_id, result)
795 self.db.update_record(msg_id, result)
796 except Exception:
796 except Exception:
797 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
797 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
798
798
799 else:
799 else:
800 self.log.debug("task::unknown task %r finished", msg_id)
800 self.log.debug("task::unknown task %r finished", msg_id)
801
801
802 def save_task_destination(self, idents, msg):
802 def save_task_destination(self, idents, msg):
803 try:
803 try:
804 msg = self.session.unserialize(msg, content=True)
804 msg = self.session.unserialize(msg, content=True)
805 except Exception:
805 except Exception:
806 self.log.error("task::invalid task tracking message", exc_info=True)
806 self.log.error("task::invalid task tracking message", exc_info=True)
807 return
807 return
808 content = msg['content']
808 content = msg['content']
809 # print (content)
809 # print (content)
810 msg_id = content['msg_id']
810 msg_id = content['msg_id']
811 engine_uuid = content['engine_id']
811 engine_uuid = content['engine_id']
812 eid = self.by_ident[cast_bytes(engine_uuid)]
812 eid = self.by_ident[cast_bytes(engine_uuid)]
813
813
814 self.log.info("task::task %r arrived on %r", msg_id, eid)
814 self.log.info("task::task %r arrived on %r", msg_id, eid)
815 if msg_id in self.unassigned:
815 if msg_id in self.unassigned:
816 self.unassigned.remove(msg_id)
816 self.unassigned.remove(msg_id)
817 # else:
817 # else:
818 # self.log.debug("task::task %r not listed as MIA?!"%(msg_id))
818 # self.log.debug("task::task %r not listed as MIA?!"%(msg_id))
819
819
820 self.tasks[eid].append(msg_id)
820 self.tasks[eid].append(msg_id)
821 # self.pending[msg_id][1].update(received=datetime.now(),engine=(eid,engine_uuid))
821 # self.pending[msg_id][1].update(received=datetime.now(),engine=(eid,engine_uuid))
822 try:
822 try:
823 self.db.update_record(msg_id, dict(engine_uuid=engine_uuid))
823 self.db.update_record(msg_id, dict(engine_uuid=engine_uuid))
824 except Exception:
824 except Exception:
825 self.log.error("DB Error saving task destination %r", msg_id, exc_info=True)
825 self.log.error("DB Error saving task destination %r", msg_id, exc_info=True)
826
826
827
827
828 def mia_task_request(self, idents, msg):
828 def mia_task_request(self, idents, msg):
829 raise NotImplementedError
829 raise NotImplementedError
830 client_id = idents[0]
830 client_id = idents[0]
831 # content = dict(mia=self.mia,status='ok')
831 # content = dict(mia=self.mia,status='ok')
832 # self.session.send('mia_reply', content=content, idents=client_id)
832 # self.session.send('mia_reply', content=content, idents=client_id)
833
833
834
834
835 #--------------------- IOPub Traffic ------------------------------
835 #--------------------- IOPub Traffic ------------------------------
836
836
837 def save_iopub_message(self, topics, msg):
837 def save_iopub_message(self, topics, msg):
838 """save an iopub message into the db"""
838 """save an iopub message into the db"""
839 # print (topics)
839 # print (topics)
840 try:
840 try:
841 msg = self.session.unserialize(msg, content=True)
841 msg = self.session.unserialize(msg, content=True)
842 except Exception:
842 except Exception:
843 self.log.error("iopub::invalid IOPub message", exc_info=True)
843 self.log.error("iopub::invalid IOPub message", exc_info=True)
844 return
844 return
845
845
846 parent = msg['parent_header']
846 parent = msg['parent_header']
847 if not parent:
847 if not parent:
848 self.log.warn("iopub::IOPub message lacks parent: %r", msg)
848 self.log.warn("iopub::IOPub message lacks parent: %r", msg)
849 return
849 return
850 msg_id = parent['msg_id']
850 msg_id = parent['msg_id']
851 msg_type = msg['header']['msg_type']
851 msg_type = msg['header']['msg_type']
852 content = msg['content']
852 content = msg['content']
853
853
854 # ensure msg_id is in db
854 # ensure msg_id is in db
855 try:
855 try:
856 rec = self.db.get_record(msg_id)
856 rec = self.db.get_record(msg_id)
857 except KeyError:
857 except KeyError:
858 rec = empty_record()
858 rec = empty_record()
859 rec['msg_id'] = msg_id
859 rec['msg_id'] = msg_id
860 self.db.add_record(msg_id, rec)
860 self.db.add_record(msg_id, rec)
861 # stream
861 # stream
862 d = {}
862 d = {}
863 if msg_type == 'stream':
863 if msg_type == 'stream':
864 name = content['name']
864 name = content['name']
865 s = rec[name] or ''
865 s = rec[name] or ''
866 d[name] = s + content['data']
866 d[name] = s + content['data']
867
867
868 elif msg_type == 'pyerr':
868 elif msg_type == 'error':
869 d['pyerr'] = content
869 d['error'] = content
870 elif msg_type == 'execute_input':
870 elif msg_type == 'execute_input':
871 d['execute_input'] = content['code']
871 d['execute_input'] = content['code']
872 elif msg_type in ('display_data', 'execute_result'):
872 elif msg_type in ('display_data', 'execute_result'):
873 d[msg_type] = content
873 d[msg_type] = content
874 elif msg_type == 'status':
874 elif msg_type == 'status':
875 pass
875 pass
876 elif msg_type == 'data_pub':
876 elif msg_type == 'data_pub':
877 self.log.info("ignored data_pub message for %s" % msg_id)
877 self.log.info("ignored data_pub message for %s" % msg_id)
878 else:
878 else:
879 self.log.warn("unhandled iopub msg_type: %r", msg_type)
879 self.log.warn("unhandled iopub msg_type: %r", msg_type)
880
880
881 if not d:
881 if not d:
882 return
882 return
883
883
884 try:
884 try:
885 self.db.update_record(msg_id, d)
885 self.db.update_record(msg_id, d)
886 except Exception:
886 except Exception:
887 self.log.error("DB Error saving iopub message %r", msg_id, exc_info=True)
887 self.log.error("DB Error saving iopub message %r", msg_id, exc_info=True)
888
888
889
889
890
890
891 #-------------------------------------------------------------------------
891 #-------------------------------------------------------------------------
892 # Registration requests
892 # Registration requests
893 #-------------------------------------------------------------------------
893 #-------------------------------------------------------------------------
894
894
895 def connection_request(self, client_id, msg):
895 def connection_request(self, client_id, msg):
896 """Reply with connection addresses for clients."""
896 """Reply with connection addresses for clients."""
897 self.log.info("client::client %r connected", client_id)
897 self.log.info("client::client %r connected", client_id)
898 content = dict(status='ok')
898 content = dict(status='ok')
899 jsonable = {}
899 jsonable = {}
900 for k,v in iteritems(self.keytable):
900 for k,v in iteritems(self.keytable):
901 if v not in self.dead_engines:
901 if v not in self.dead_engines:
902 jsonable[str(k)] = v
902 jsonable[str(k)] = v
903 content['engines'] = jsonable
903 content['engines'] = jsonable
904 self.session.send(self.query, 'connection_reply', content, parent=msg, ident=client_id)
904 self.session.send(self.query, 'connection_reply', content, parent=msg, ident=client_id)
905
905
906 def register_engine(self, reg, msg):
906 def register_engine(self, reg, msg):
907 """Register a new engine."""
907 """Register a new engine."""
908 content = msg['content']
908 content = msg['content']
909 try:
909 try:
910 uuid = content['uuid']
910 uuid = content['uuid']
911 except KeyError:
911 except KeyError:
912 self.log.error("registration::queue not specified", exc_info=True)
912 self.log.error("registration::queue not specified", exc_info=True)
913 return
913 return
914
914
915 eid = self._next_id
915 eid = self._next_id
916
916
917 self.log.debug("registration::register_engine(%i, %r)", eid, uuid)
917 self.log.debug("registration::register_engine(%i, %r)", eid, uuid)
918
918
919 content = dict(id=eid,status='ok',hb_period=self.heartmonitor.period)
919 content = dict(id=eid,status='ok',hb_period=self.heartmonitor.period)
920 # check if requesting available IDs:
920 # check if requesting available IDs:
921 if cast_bytes(uuid) in self.by_ident:
921 if cast_bytes(uuid) in self.by_ident:
922 try:
922 try:
923 raise KeyError("uuid %r in use" % uuid)
923 raise KeyError("uuid %r in use" % uuid)
924 except:
924 except:
925 content = error.wrap_exception()
925 content = error.wrap_exception()
926 self.log.error("uuid %r in use", uuid, exc_info=True)
926 self.log.error("uuid %r in use", uuid, exc_info=True)
927 else:
927 else:
928 for h, ec in iteritems(self.incoming_registrations):
928 for h, ec in iteritems(self.incoming_registrations):
929 if uuid == h:
929 if uuid == h:
930 try:
930 try:
931 raise KeyError("heart_id %r in use" % uuid)
931 raise KeyError("heart_id %r in use" % uuid)
932 except:
932 except:
933 self.log.error("heart_id %r in use", uuid, exc_info=True)
933 self.log.error("heart_id %r in use", uuid, exc_info=True)
934 content = error.wrap_exception()
934 content = error.wrap_exception()
935 break
935 break
936 elif uuid == ec.uuid:
936 elif uuid == ec.uuid:
937 try:
937 try:
938 raise KeyError("uuid %r in use" % uuid)
938 raise KeyError("uuid %r in use" % uuid)
939 except:
939 except:
940 self.log.error("uuid %r in use", uuid, exc_info=True)
940 self.log.error("uuid %r in use", uuid, exc_info=True)
941 content = error.wrap_exception()
941 content = error.wrap_exception()
942 break
942 break
943
943
944 msg = self.session.send(self.query, "registration_reply",
944 msg = self.session.send(self.query, "registration_reply",
945 content=content,
945 content=content,
946 ident=reg)
946 ident=reg)
947
947
948 heart = cast_bytes(uuid)
948 heart = cast_bytes(uuid)
949
949
950 if content['status'] == 'ok':
950 if content['status'] == 'ok':
951 if heart in self.heartmonitor.hearts:
951 if heart in self.heartmonitor.hearts:
952 # already beating
952 # already beating
953 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid)
953 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid)
954 self.finish_registration(heart)
954 self.finish_registration(heart)
955 else:
955 else:
956 purge = lambda : self._purge_stalled_registration(heart)
956 purge = lambda : self._purge_stalled_registration(heart)
957 dc = ioloop.DelayedCallback(purge, self.registration_timeout, self.loop)
957 dc = ioloop.DelayedCallback(purge, self.registration_timeout, self.loop)
958 dc.start()
958 dc.start()
959 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid,stallback=dc)
959 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid,stallback=dc)
960 else:
960 else:
961 self.log.error("registration::registration %i failed: %r", eid, content['evalue'])
961 self.log.error("registration::registration %i failed: %r", eid, content['evalue'])
962
962
963 return eid
963 return eid
964
964
965 def unregister_engine(self, ident, msg):
965 def unregister_engine(self, ident, msg):
966 """Unregister an engine that explicitly requested to leave."""
966 """Unregister an engine that explicitly requested to leave."""
967 try:
967 try:
968 eid = msg['content']['id']
968 eid = msg['content']['id']
969 except:
969 except:
970 self.log.error("registration::bad engine id for unregistration: %r", ident, exc_info=True)
970 self.log.error("registration::bad engine id for unregistration: %r", ident, exc_info=True)
971 return
971 return
972 self.log.info("registration::unregister_engine(%r)", eid)
972 self.log.info("registration::unregister_engine(%r)", eid)
973 # print (eid)
973 # print (eid)
974 uuid = self.keytable[eid]
974 uuid = self.keytable[eid]
975 content=dict(id=eid, uuid=uuid)
975 content=dict(id=eid, uuid=uuid)
976 self.dead_engines.add(uuid)
976 self.dead_engines.add(uuid)
977 # self.ids.remove(eid)
977 # self.ids.remove(eid)
978 # uuid = self.keytable.pop(eid)
978 # uuid = self.keytable.pop(eid)
979 #
979 #
980 # ec = self.engines.pop(eid)
980 # ec = self.engines.pop(eid)
981 # self.hearts.pop(ec.heartbeat)
981 # self.hearts.pop(ec.heartbeat)
982 # self.by_ident.pop(ec.queue)
982 # self.by_ident.pop(ec.queue)
983 # self.completed.pop(eid)
983 # self.completed.pop(eid)
984 handleit = lambda : self._handle_stranded_msgs(eid, uuid)
984 handleit = lambda : self._handle_stranded_msgs(eid, uuid)
985 dc = ioloop.DelayedCallback(handleit, self.registration_timeout, self.loop)
985 dc = ioloop.DelayedCallback(handleit, self.registration_timeout, self.loop)
986 dc.start()
986 dc.start()
987 ############## TODO: HANDLE IT ################
987 ############## TODO: HANDLE IT ################
988
988
989 self._save_engine_state()
989 self._save_engine_state()
990
990
991 if self.notifier:
991 if self.notifier:
992 self.session.send(self.notifier, "unregistration_notification", content=content)
992 self.session.send(self.notifier, "unregistration_notification", content=content)
993
993
994 def _handle_stranded_msgs(self, eid, uuid):
994 def _handle_stranded_msgs(self, eid, uuid):
995 """Handle messages known to be on an engine when the engine unregisters.
995 """Handle messages known to be on an engine when the engine unregisters.
996
996
997 It is possible that this will fire prematurely - that is, an engine will
997 It is possible that this will fire prematurely - that is, an engine will
998 go down after completing a result, and the client will be notified
998 go down after completing a result, and the client will be notified
999 that the result failed and later receive the actual result.
999 that the result failed and later receive the actual result.
1000 """
1000 """
1001
1001
1002 outstanding = self.queues[eid]
1002 outstanding = self.queues[eid]
1003
1003
1004 for msg_id in outstanding:
1004 for msg_id in outstanding:
1005 self.pending.remove(msg_id)
1005 self.pending.remove(msg_id)
1006 self.all_completed.add(msg_id)
1006 self.all_completed.add(msg_id)
1007 try:
1007 try:
1008 raise error.EngineError("Engine %r died while running task %r" % (eid, msg_id))
1008 raise error.EngineError("Engine %r died while running task %r" % (eid, msg_id))
1009 except:
1009 except:
1010 content = error.wrap_exception()
1010 content = error.wrap_exception()
1011 # build a fake header:
1011 # build a fake header:
1012 header = {}
1012 header = {}
1013 header['engine'] = uuid
1013 header['engine'] = uuid
1014 header['date'] = datetime.now()
1014 header['date'] = datetime.now()
1015 rec = dict(result_content=content, result_header=header, result_buffers=[])
1015 rec = dict(result_content=content, result_header=header, result_buffers=[])
1016 rec['completed'] = header['date']
1016 rec['completed'] = header['date']
1017 rec['engine_uuid'] = uuid
1017 rec['engine_uuid'] = uuid
1018 try:
1018 try:
1019 self.db.update_record(msg_id, rec)
1019 self.db.update_record(msg_id, rec)
1020 except Exception:
1020 except Exception:
1021 self.log.error("DB Error handling stranded msg %r", msg_id, exc_info=True)
1021 self.log.error("DB Error handling stranded msg %r", msg_id, exc_info=True)
1022
1022
1023
1023
1024 def finish_registration(self, heart):
1024 def finish_registration(self, heart):
1025 """Second half of engine registration, called after our HeartMonitor
1025 """Second half of engine registration, called after our HeartMonitor
1026 has received a beat from the Engine's Heart."""
1026 has received a beat from the Engine's Heart."""
1027 try:
1027 try:
1028 ec = self.incoming_registrations.pop(heart)
1028 ec = self.incoming_registrations.pop(heart)
1029 except KeyError:
1029 except KeyError:
1030 self.log.error("registration::tried to finish nonexistant registration", exc_info=True)
1030 self.log.error("registration::tried to finish nonexistant registration", exc_info=True)
1031 return
1031 return
1032 self.log.info("registration::finished registering engine %i:%s", ec.id, ec.uuid)
1032 self.log.info("registration::finished registering engine %i:%s", ec.id, ec.uuid)
1033 if ec.stallback is not None:
1033 if ec.stallback is not None:
1034 ec.stallback.stop()
1034 ec.stallback.stop()
1035 eid = ec.id
1035 eid = ec.id
1036 self.ids.add(eid)
1036 self.ids.add(eid)
1037 self.keytable[eid] = ec.uuid
1037 self.keytable[eid] = ec.uuid
1038 self.engines[eid] = ec
1038 self.engines[eid] = ec
1039 self.by_ident[cast_bytes(ec.uuid)] = ec.id
1039 self.by_ident[cast_bytes(ec.uuid)] = ec.id
1040 self.queues[eid] = list()
1040 self.queues[eid] = list()
1041 self.tasks[eid] = list()
1041 self.tasks[eid] = list()
1042 self.completed[eid] = list()
1042 self.completed[eid] = list()
1043 self.hearts[heart] = eid
1043 self.hearts[heart] = eid
1044 content = dict(id=eid, uuid=self.engines[eid].uuid)
1044 content = dict(id=eid, uuid=self.engines[eid].uuid)
1045 if self.notifier:
1045 if self.notifier:
1046 self.session.send(self.notifier, "registration_notification", content=content)
1046 self.session.send(self.notifier, "registration_notification", content=content)
1047 self.log.info("engine::Engine Connected: %i", eid)
1047 self.log.info("engine::Engine Connected: %i", eid)
1048
1048
1049 self._save_engine_state()
1049 self._save_engine_state()
1050
1050
1051 def _purge_stalled_registration(self, heart):
1051 def _purge_stalled_registration(self, heart):
1052 if heart in self.incoming_registrations:
1052 if heart in self.incoming_registrations:
1053 ec = self.incoming_registrations.pop(heart)
1053 ec = self.incoming_registrations.pop(heart)
1054 self.log.info("registration::purging stalled registration: %i", ec.id)
1054 self.log.info("registration::purging stalled registration: %i", ec.id)
1055 else:
1055 else:
1056 pass
1056 pass
1057
1057
1058 #-------------------------------------------------------------------------
1058 #-------------------------------------------------------------------------
1059 # Engine State
1059 # Engine State
1060 #-------------------------------------------------------------------------
1060 #-------------------------------------------------------------------------
1061
1061
1062
1062
1063 def _cleanup_engine_state_file(self):
1063 def _cleanup_engine_state_file(self):
1064 """cleanup engine state mapping"""
1064 """cleanup engine state mapping"""
1065
1065
1066 if os.path.exists(self.engine_state_file):
1066 if os.path.exists(self.engine_state_file):
1067 self.log.debug("cleaning up engine state: %s", self.engine_state_file)
1067 self.log.debug("cleaning up engine state: %s", self.engine_state_file)
1068 try:
1068 try:
1069 os.remove(self.engine_state_file)
1069 os.remove(self.engine_state_file)
1070 except IOError:
1070 except IOError:
1071 self.log.error("Couldn't cleanup file: %s", self.engine_state_file, exc_info=True)
1071 self.log.error("Couldn't cleanup file: %s", self.engine_state_file, exc_info=True)
1072
1072
1073
1073
1074 def _save_engine_state(self):
1074 def _save_engine_state(self):
1075 """save engine mapping to JSON file"""
1075 """save engine mapping to JSON file"""
1076 if not self.engine_state_file:
1076 if not self.engine_state_file:
1077 return
1077 return
1078 self.log.debug("save engine state to %s" % self.engine_state_file)
1078 self.log.debug("save engine state to %s" % self.engine_state_file)
1079 state = {}
1079 state = {}
1080 engines = {}
1080 engines = {}
1081 for eid, ec in iteritems(self.engines):
1081 for eid, ec in iteritems(self.engines):
1082 if ec.uuid not in self.dead_engines:
1082 if ec.uuid not in self.dead_engines:
1083 engines[eid] = ec.uuid
1083 engines[eid] = ec.uuid
1084
1084
1085 state['engines'] = engines
1085 state['engines'] = engines
1086
1086
1087 state['next_id'] = self._idcounter
1087 state['next_id'] = self._idcounter
1088
1088
1089 with open(self.engine_state_file, 'w') as f:
1089 with open(self.engine_state_file, 'w') as f:
1090 json.dump(state, f)
1090 json.dump(state, f)
1091
1091
1092
1092
1093 def _load_engine_state(self):
1093 def _load_engine_state(self):
1094 """load engine mapping from JSON file"""
1094 """load engine mapping from JSON file"""
1095 if not os.path.exists(self.engine_state_file):
1095 if not os.path.exists(self.engine_state_file):
1096 return
1096 return
1097
1097
1098 self.log.info("loading engine state from %s" % self.engine_state_file)
1098 self.log.info("loading engine state from %s" % self.engine_state_file)
1099
1099
1100 with open(self.engine_state_file) as f:
1100 with open(self.engine_state_file) as f:
1101 state = json.load(f)
1101 state = json.load(f)
1102
1102
1103 save_notifier = self.notifier
1103 save_notifier = self.notifier
1104 self.notifier = None
1104 self.notifier = None
1105 for eid, uuid in iteritems(state['engines']):
1105 for eid, uuid in iteritems(state['engines']):
1106 heart = uuid.encode('ascii')
1106 heart = uuid.encode('ascii')
1107 # start with this heart as current and beating:
1107 # start with this heart as current and beating:
1108 self.heartmonitor.responses.add(heart)
1108 self.heartmonitor.responses.add(heart)
1109 self.heartmonitor.hearts.add(heart)
1109 self.heartmonitor.hearts.add(heart)
1110
1110
1111 self.incoming_registrations[heart] = EngineConnector(id=int(eid), uuid=uuid)
1111 self.incoming_registrations[heart] = EngineConnector(id=int(eid), uuid=uuid)
1112 self.finish_registration(heart)
1112 self.finish_registration(heart)
1113
1113
1114 self.notifier = save_notifier
1114 self.notifier = save_notifier
1115
1115
1116 self._idcounter = state['next_id']
1116 self._idcounter = state['next_id']
1117
1117
1118 #-------------------------------------------------------------------------
1118 #-------------------------------------------------------------------------
1119 # Client Requests
1119 # Client Requests
1120 #-------------------------------------------------------------------------
1120 #-------------------------------------------------------------------------
1121
1121
1122 def shutdown_request(self, client_id, msg):
1122 def shutdown_request(self, client_id, msg):
1123 """handle shutdown request."""
1123 """handle shutdown request."""
1124 self.session.send(self.query, 'shutdown_reply', content={'status': 'ok'}, ident=client_id)
1124 self.session.send(self.query, 'shutdown_reply', content={'status': 'ok'}, ident=client_id)
1125 # also notify other clients of shutdown
1125 # also notify other clients of shutdown
1126 self.session.send(self.notifier, 'shutdown_notice', content={'status': 'ok'})
1126 self.session.send(self.notifier, 'shutdown_notice', content={'status': 'ok'})
1127 dc = ioloop.DelayedCallback(lambda : self._shutdown(), 1000, self.loop)
1127 dc = ioloop.DelayedCallback(lambda : self._shutdown(), 1000, self.loop)
1128 dc.start()
1128 dc.start()
1129
1129
1130 def _shutdown(self):
1130 def _shutdown(self):
1131 self.log.info("hub::hub shutting down.")
1131 self.log.info("hub::hub shutting down.")
1132 time.sleep(0.1)
1132 time.sleep(0.1)
1133 sys.exit(0)
1133 sys.exit(0)
1134
1134
1135
1135
1136 def check_load(self, client_id, msg):
1136 def check_load(self, client_id, msg):
1137 content = msg['content']
1137 content = msg['content']
1138 try:
1138 try:
1139 targets = content['targets']
1139 targets = content['targets']
1140 targets = self._validate_targets(targets)
1140 targets = self._validate_targets(targets)
1141 except:
1141 except:
1142 content = error.wrap_exception()
1142 content = error.wrap_exception()
1143 self.session.send(self.query, "hub_error",
1143 self.session.send(self.query, "hub_error",
1144 content=content, ident=client_id)
1144 content=content, ident=client_id)
1145 return
1145 return
1146
1146
1147 content = dict(status='ok')
1147 content = dict(status='ok')
1148 # loads = {}
1148 # loads = {}
1149 for t in targets:
1149 for t in targets:
1150 content[bytes(t)] = len(self.queues[t])+len(self.tasks[t])
1150 content[bytes(t)] = len(self.queues[t])+len(self.tasks[t])
1151 self.session.send(self.query, "load_reply", content=content, ident=client_id)
1151 self.session.send(self.query, "load_reply", content=content, ident=client_id)
1152
1152
1153
1153
1154 def queue_status(self, client_id, msg):
1154 def queue_status(self, client_id, msg):
1155 """Return the Queue status of one or more targets.
1155 """Return the Queue status of one or more targets.
1156
1156
1157 If verbose, return the msg_ids, else return len of each type.
1157 If verbose, return the msg_ids, else return len of each type.
1158
1158
1159 Keys:
1159 Keys:
1160
1160
1161 * queue (pending MUX jobs)
1161 * queue (pending MUX jobs)
1162 * tasks (pending Task jobs)
1162 * tasks (pending Task jobs)
1163 * completed (finished jobs from both queues)
1163 * completed (finished jobs from both queues)
1164 """
1164 """
1165 content = msg['content']
1165 content = msg['content']
1166 targets = content['targets']
1166 targets = content['targets']
1167 try:
1167 try:
1168 targets = self._validate_targets(targets)
1168 targets = self._validate_targets(targets)
1169 except:
1169 except:
1170 content = error.wrap_exception()
1170 content = error.wrap_exception()
1171 self.session.send(self.query, "hub_error",
1171 self.session.send(self.query, "hub_error",
1172 content=content, ident=client_id)
1172 content=content, ident=client_id)
1173 return
1173 return
1174 verbose = content.get('verbose', False)
1174 verbose = content.get('verbose', False)
1175 content = dict(status='ok')
1175 content = dict(status='ok')
1176 for t in targets:
1176 for t in targets:
1177 queue = self.queues[t]
1177 queue = self.queues[t]
1178 completed = self.completed[t]
1178 completed = self.completed[t]
1179 tasks = self.tasks[t]
1179 tasks = self.tasks[t]
1180 if not verbose:
1180 if not verbose:
1181 queue = len(queue)
1181 queue = len(queue)
1182 completed = len(completed)
1182 completed = len(completed)
1183 tasks = len(tasks)
1183 tasks = len(tasks)
1184 content[str(t)] = {'queue': queue, 'completed': completed, 'tasks': tasks}
1184 content[str(t)] = {'queue': queue, 'completed': completed, 'tasks': tasks}
1185 content['unassigned'] = list(self.unassigned) if verbose else len(self.unassigned)
1185 content['unassigned'] = list(self.unassigned) if verbose else len(self.unassigned)
1186 # print (content)
1186 # print (content)
1187 self.session.send(self.query, "queue_reply", content=content, ident=client_id)
1187 self.session.send(self.query, "queue_reply", content=content, ident=client_id)
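# Reply sketch for queue_status (illustrative values): with verbose=False the
# queue_reply content carries per-engine counts, e.g.
#   {'status': 'ok', '0': {'queue': 2, 'completed': 10, 'tasks': 1}, 'unassigned': 0}
# and with verbose=True the same keys hold the msg_id lists instead of counts.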
1188
1188
1189 def purge_results(self, client_id, msg):
1189 def purge_results(self, client_id, msg):
1190 """Purge results from memory. This method is more valuable before we move
1190 """Purge results from memory. This method is more valuable before we move
1191 to a DB based message storage mechanism."""
1191 to a DB based message storage mechanism."""
1192 content = msg['content']
1192 content = msg['content']
1193 self.log.info("Dropping records with %s", content)
1193 self.log.info("Dropping records with %s", content)
1194 msg_ids = content.get('msg_ids', [])
1194 msg_ids = content.get('msg_ids', [])
1195 reply = dict(status='ok')
1195 reply = dict(status='ok')
1196 if msg_ids == 'all':
1196 if msg_ids == 'all':
1197 try:
1197 try:
1198 self.db.drop_matching_records(dict(completed={'$ne':None}))
1198 self.db.drop_matching_records(dict(completed={'$ne':None}))
1199 except Exception:
1199 except Exception:
1200 reply = error.wrap_exception()
1200 reply = error.wrap_exception()
1201 self.log.exception("Error dropping records")
1201 self.log.exception("Error dropping records")
1202 else:
1202 else:
1203 pending = [m for m in msg_ids if (m in self.pending)]
1203 pending = [m for m in msg_ids if (m in self.pending)]
1204 if pending:
1204 if pending:
1205 try:
1205 try:
1206 raise IndexError("msg pending: %r" % pending[0])
1206 raise IndexError("msg pending: %r" % pending[0])
1207 except:
1207 except:
1208 reply = error.wrap_exception()
1208 reply = error.wrap_exception()
1209 self.log.exception("Error dropping records")
1209 self.log.exception("Error dropping records")
1210 else:
1210 else:
1211 try:
1211 try:
1212 self.db.drop_matching_records(dict(msg_id={'$in':msg_ids}))
1212 self.db.drop_matching_records(dict(msg_id={'$in':msg_ids}))
1213 except Exception:
1213 except Exception:
1214 reply = error.wrap_exception()
1214 reply = error.wrap_exception()
1215 self.log.exception("Error dropping records")
1215 self.log.exception("Error dropping records")
1216
1216
1217 if reply['status'] == 'ok':
1217 if reply['status'] == 'ok':
1218 eids = content.get('engine_ids', [])
1218 eids = content.get('engine_ids', [])
1219 for eid in eids:
1219 for eid in eids:
1220 if eid not in self.engines:
1220 if eid not in self.engines:
1221 try:
1221 try:
1222 raise IndexError("No such engine: %i" % eid)
1222 raise IndexError("No such engine: %i" % eid)
1223 except:
1223 except:
1224 reply = error.wrap_exception()
1224 reply = error.wrap_exception()
1225 self.log.exception("Error dropping records")
1225 self.log.exception("Error dropping records")
1226 break
1226 break
1227 uid = self.engines[eid].uuid
1227 uid = self.engines[eid].uuid
1228 try:
1228 try:
1229 self.db.drop_matching_records(dict(engine_uuid=uid, completed={'$ne':None}))
1229 self.db.drop_matching_records(dict(engine_uuid=uid, completed={'$ne':None}))
1230 except Exception:
1230 except Exception:
1231 reply = error.wrap_exception()
1231 reply = error.wrap_exception()
1232 self.log.exception("Error dropping records")
1232 self.log.exception("Error dropping records")
1233 break
1233 break
1234
1234
1235 self.session.send(self.query, 'purge_reply', content=reply, ident=client_id)
1235 self.session.send(self.query, 'purge_reply', content=reply, ident=client_id)
1236
1236
1237 def resubmit_task(self, client_id, msg):
1237 def resubmit_task(self, client_id, msg):
1238 """Resubmit one or more tasks."""
1238 """Resubmit one or more tasks."""
1239 def finish(reply):
1239 def finish(reply):
1240 self.session.send(self.query, 'resubmit_reply', content=reply, ident=client_id)
1240 self.session.send(self.query, 'resubmit_reply', content=reply, ident=client_id)
1241
1241
1242 content = msg['content']
1242 content = msg['content']
1243 msg_ids = content['msg_ids']
1243 msg_ids = content['msg_ids']
1244 reply = dict(status='ok')
1244 reply = dict(status='ok')
1245 try:
1245 try:
1246 records = self.db.find_records({'msg_id' : {'$in' : msg_ids}}, keys=[
1246 records = self.db.find_records({'msg_id' : {'$in' : msg_ids}}, keys=[
1247 'header', 'content', 'buffers'])
1247 'header', 'content', 'buffers'])
1248 except Exception:
1248 except Exception:
1249 self.log.error('db::db error finding tasks to resubmit', exc_info=True)
1249 self.log.error('db::db error finding tasks to resubmit', exc_info=True)
1250 return finish(error.wrap_exception())
1250 return finish(error.wrap_exception())
1251
1251
1252 # validate msg_ids
1252 # validate msg_ids
1253 found_ids = [ rec['msg_id'] for rec in records ]
1253 found_ids = [ rec['msg_id'] for rec in records ]
1254 pending_ids = [ msg_id for msg_id in found_ids if msg_id in self.pending ]
1254 pending_ids = [ msg_id for msg_id in found_ids if msg_id in self.pending ]
1255 if len(records) > len(msg_ids):
1255 if len(records) > len(msg_ids):
1256 try:
1256 try:
1257 raise RuntimeError("DB appears to be in an inconsistent state. "
1257 raise RuntimeError("DB appears to be in an inconsistent state. "
1258 "More matching records were found than should exist")
1258 "More matching records were found than should exist")
1259 except Exception:
1259 except Exception:
1260 self.log.exception("Failed to resubmit task")
1260 self.log.exception("Failed to resubmit task")
1261 return finish(error.wrap_exception())
1261 return finish(error.wrap_exception())
1262 elif len(records) < len(msg_ids):
1262 elif len(records) < len(msg_ids):
1263 missing = [ m for m in msg_ids if m not in found_ids ]
1263 missing = [ m for m in msg_ids if m not in found_ids ]
1264 try:
1264 try:
1265 raise KeyError("No such msg(s): %r" % missing)
1265 raise KeyError("No such msg(s): %r" % missing)
1266 except KeyError:
1266 except KeyError:
1267 self.log.exception("Failed to resubmit task")
1267 self.log.exception("Failed to resubmit task")
1268 return finish(error.wrap_exception())
1268 return finish(error.wrap_exception())
1269 elif pending_ids:
1269 elif pending_ids:
1270 pass
1270 pass
1271 # no need to raise on resubmit of pending task, now that we
1271 # no need to raise on resubmit of pending task, now that we
1272 # resubmit under new ID, but do we want to raise anyway?
1272 # resubmit under new ID, but do we want to raise anyway?
1273 # msg_id = invalid_ids[0]
1273 # msg_id = invalid_ids[0]
1274 # try:
1274 # try:
1275 # raise ValueError("Task(s) %r appears to be inflight" % )
1275 # raise ValueError("Task(s) %r appears to be inflight" % )
1276 # except Exception:
1276 # except Exception:
1277 # return finish(error.wrap_exception())
1277 # return finish(error.wrap_exception())
1278
1278
1279 # mapping of original IDs to resubmitted IDs
1279 # mapping of original IDs to resubmitted IDs
1280 resubmitted = {}
1280 resubmitted = {}
1281
1281
1282 # send the messages
1282 # send the messages
1283 for rec in records:
1283 for rec in records:
1284 header = rec['header']
1284 header = rec['header']
1285 msg = self.session.msg(header['msg_type'], parent=header)
1285 msg = self.session.msg(header['msg_type'], parent=header)
1286 msg_id = msg['msg_id']
1286 msg_id = msg['msg_id']
1287 msg['content'] = rec['content']
1287 msg['content'] = rec['content']
1288
1288
1289 # use the old header, but update msg_id and timestamp
1289 # use the old header, but update msg_id and timestamp
1290 fresh = msg['header']
1290 fresh = msg['header']
1291 header['msg_id'] = fresh['msg_id']
1291 header['msg_id'] = fresh['msg_id']
1292 header['date'] = fresh['date']
1292 header['date'] = fresh['date']
1293 msg['header'] = header
1293 msg['header'] = header
1294
1294
1295 self.session.send(self.resubmit, msg, buffers=rec['buffers'])
1295 self.session.send(self.resubmit, msg, buffers=rec['buffers'])
1296
1296
1297 resubmitted[rec['msg_id']] = msg_id
1297 resubmitted[rec['msg_id']] = msg_id
1298 self.pending.add(msg_id)
1298 self.pending.add(msg_id)
1299 msg['buffers'] = rec['buffers']
1299 msg['buffers'] = rec['buffers']
1300 try:
1300 try:
1301 self.db.add_record(msg_id, init_record(msg))
1301 self.db.add_record(msg_id, init_record(msg))
1302 except Exception:
1302 except Exception:
1303 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1303 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1304 return finish(error.wrap_exception())
1304 return finish(error.wrap_exception())
1305
1305
1306 finish(dict(status='ok', resubmitted=resubmitted))
1306 finish(dict(status='ok', resubmitted=resubmitted))
1307
1307
1308 # store the new IDs in the Task DB
1308 # store the new IDs in the Task DB
1309 for msg_id, resubmit_id in iteritems(resubmitted):
1309 for msg_id, resubmit_id in iteritems(resubmitted):
1310 try:
1310 try:
1311 self.db.update_record(msg_id, {'resubmitted' : resubmit_id})
1311 self.db.update_record(msg_id, {'resubmitted' : resubmit_id})
1312 except Exception:
1312 except Exception:
1313 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1313 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
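# Reply sketch for resubmit_task (placeholder ids): resubmit_reply maps each original
# msg_id to the msg_id assigned to its resubmission, e.g.
#   {'status': 'ok', 'resubmitted': {'<original msg_id>': '<new msg_id>'}}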
1314
1314
1315
1315
1316 def _extract_record(self, rec):
1316 def _extract_record(self, rec):
1317 """decompose a TaskRecord dict into subsection of reply for get_result"""
1317 """decompose a TaskRecord dict into subsection of reply for get_result"""
1318 io_dict = {}
1318 io_dict = {}
1319 for key in ('execute_input', 'execute_result', 'pyerr', 'stdout', 'stderr'):
1319 for key in ('execute_input', 'execute_result', 'error', 'stdout', 'stderr'):
1320 io_dict[key] = rec[key]
1320 io_dict[key] = rec[key]
1321 content = {
1321 content = {
1322 'header': rec['header'],
1322 'header': rec['header'],
1323 'metadata': rec['metadata'],
1323 'metadata': rec['metadata'],
1324 'result_metadata': rec['result_metadata'],
1324 'result_metadata': rec['result_metadata'],
1325 'result_header' : rec['result_header'],
1325 'result_header' : rec['result_header'],
1326 'result_content': rec['result_content'],
1326 'result_content': rec['result_content'],
1327 'received' : rec['received'],
1327 'received' : rec['received'],
1328 'io' : io_dict,
1328 'io' : io_dict,
1329 }
1329 }
1330 if rec['result_buffers']:
1330 if rec['result_buffers']:
1331 buffers = list(map(bytes, rec['result_buffers']))
1331 buffers = list(map(bytes, rec['result_buffers']))
1332 else:
1332 else:
1333 buffers = []
1333 buffers = []
1334
1334
1335 return content, buffers
1335 return content, buffers
1336
1336
1337 def get_results(self, client_id, msg):
1337 def get_results(self, client_id, msg):
1338 """Get the result of 1 or more messages."""
1338 """Get the result of 1 or more messages."""
1339 content = msg['content']
1339 content = msg['content']
1340 msg_ids = sorted(set(content['msg_ids']))
1340 msg_ids = sorted(set(content['msg_ids']))
1341 statusonly = content.get('status_only', False)
1341 statusonly = content.get('status_only', False)
1342 pending = []
1342 pending = []
1343 completed = []
1343 completed = []
1344 content = dict(status='ok')
1344 content = dict(status='ok')
1345 content['pending'] = pending
1345 content['pending'] = pending
1346 content['completed'] = completed
1346 content['completed'] = completed
1347 buffers = []
1347 buffers = []
1348 if not statusonly:
1348 if not statusonly:
1349 try:
1349 try:
1350 matches = self.db.find_records(dict(msg_id={'$in':msg_ids}))
1350 matches = self.db.find_records(dict(msg_id={'$in':msg_ids}))
1351 # turn match list into dict, for faster lookup
1351 # turn match list into dict, for faster lookup
1352 records = {}
1352 records = {}
1353 for rec in matches:
1353 for rec in matches:
1354 records[rec['msg_id']] = rec
1354 records[rec['msg_id']] = rec
1355 except Exception:
1355 except Exception:
1356 content = error.wrap_exception()
1356 content = error.wrap_exception()
1357 self.log.exception("Failed to get results")
1357 self.log.exception("Failed to get results")
1358 self.session.send(self.query, "result_reply", content=content,
1358 self.session.send(self.query, "result_reply", content=content,
1359 parent=msg, ident=client_id)
1359 parent=msg, ident=client_id)
1360 return
1360 return
1361 else:
1361 else:
1362 records = {}
1362 records = {}
1363 for msg_id in msg_ids:
1363 for msg_id in msg_ids:
1364 if msg_id in self.pending:
1364 if msg_id in self.pending:
1365 pending.append(msg_id)
1365 pending.append(msg_id)
1366 elif msg_id in self.all_completed:
1366 elif msg_id in self.all_completed:
1367 completed.append(msg_id)
1367 completed.append(msg_id)
1368 if not statusonly:
1368 if not statusonly:
1369 c,bufs = self._extract_record(records[msg_id])
1369 c,bufs = self._extract_record(records[msg_id])
1370 content[msg_id] = c
1370 content[msg_id] = c
1371 buffers.extend(bufs)
1371 buffers.extend(bufs)
1372 elif msg_id in records:
1372 elif msg_id in records:
1373 if records[msg_id]['completed']:
1373 if records[msg_id]['completed']:
1374 completed.append(msg_id)
1374 completed.append(msg_id)
1375 c,bufs = self._extract_record(records[msg_id])
1375 c,bufs = self._extract_record(records[msg_id])
1376 content[msg_id] = c
1376 content[msg_id] = c
1377 buffers.extend(bufs)
1377 buffers.extend(bufs)
1378 else:
1378 else:
1379 pending.append(msg_id)
1379 pending.append(msg_id)
1380 else:
1380 else:
1381 try:
1381 try:
1382 raise KeyError('No such message: '+msg_id)
1382 raise KeyError('No such message: '+msg_id)
1383 except:
1383 except:
1384 content = error.wrap_exception()
1384 content = error.wrap_exception()
1385 break
1385 break
1386 self.session.send(self.query, "result_reply", content=content,
1386 self.session.send(self.query, "result_reply", content=content,
1387 parent=msg, ident=client_id,
1387 parent=msg, ident=client_id,
1388 buffers=buffers)
1388 buffers=buffers)
1389
1389
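
Hypothetical client-side view of the result_reply content assembled above: 'status', 'pending', and 'completed' are bookkeeping keys, and every other key is a msg_id mapped to the record returned by _extract_record. The values below are made up.

    reply = {                                  # shape only; values are made up
        'status': 'ok',
        'pending': ['msg-2'],
        'completed': ['msg-1'],
        'msg-1': {'io': {'stdout': 'hi\n'}},
    }
    reserved = {'status', 'pending', 'completed'}
    results = {k: v for k, v in reply.items() if k not in reserved}
    print(sorted(results))                     # ['msg-1']
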
1390 def get_history(self, client_id, msg):
1390 def get_history(self, client_id, msg):
1391 """Get a list of all msg_ids in our DB records"""
1391 """Get a list of all msg_ids in our DB records"""
1392 try:
1392 try:
1393 msg_ids = self.db.get_history()
1393 msg_ids = self.db.get_history()
1394 except Exception as e:
1394 except Exception as e:
1395 content = error.wrap_exception()
1395 content = error.wrap_exception()
1396 self.log.exception("Failed to get history")
1396 self.log.exception("Failed to get history")
1397 else:
1397 else:
1398 content = dict(status='ok', history=msg_ids)
1398 content = dict(status='ok', history=msg_ids)
1399
1399
1400 self.session.send(self.query, "history_reply", content=content,
1400 self.session.send(self.query, "history_reply", content=content,
1401 parent=msg, ident=client_id)
1401 parent=msg, ident=client_id)
1402
1402
1403 def db_query(self, client_id, msg):
1403 def db_query(self, client_id, msg):
1404 """Perform a raw query on the task record database."""
1404 """Perform a raw query on the task record database."""
1405 content = msg['content']
1405 content = msg['content']
1406 query = extract_dates(content.get('query', {}))
1406 query = extract_dates(content.get('query', {}))
1407 keys = content.get('keys', None)
1407 keys = content.get('keys', None)
1408 buffers = []
1408 buffers = []
1409 empty = list()
1409 empty = list()
1410 try:
1410 try:
1411 records = self.db.find_records(query, keys)
1411 records = self.db.find_records(query, keys)
1412 except Exception as e:
1412 except Exception as e:
1413 content = error.wrap_exception()
1413 content = error.wrap_exception()
1414 self.log.exception("DB query failed")
1414 self.log.exception("DB query failed")
1415 else:
1415 else:
1416 # extract buffers from reply content:
1416 # extract buffers from reply content:
1417 if keys is not None:
1417 if keys is not None:
1418 buffer_lens = [] if 'buffers' in keys else None
1418 buffer_lens = [] if 'buffers' in keys else None
1419 result_buffer_lens = [] if 'result_buffers' in keys else None
1419 result_buffer_lens = [] if 'result_buffers' in keys else None
1420 else:
1420 else:
1421 buffer_lens = None
1421 buffer_lens = None
1422 result_buffer_lens = None
1422 result_buffer_lens = None
1423
1423
1424 for rec in records:
1424 for rec in records:
1425 # buffers may be None, so double check
1425 # buffers may be None, so double check
1426 b = rec.pop('buffers', empty) or empty
1426 b = rec.pop('buffers', empty) or empty
1427 if buffer_lens is not None:
1427 if buffer_lens is not None:
1428 buffer_lens.append(len(b))
1428 buffer_lens.append(len(b))
1429 buffers.extend(b)
1429 buffers.extend(b)
1430 rb = rec.pop('result_buffers', empty) or empty
1430 rb = rec.pop('result_buffers', empty) or empty
1431 if result_buffer_lens is not None:
1431 if result_buffer_lens is not None:
1432 result_buffer_lens.append(len(rb))
1432 result_buffer_lens.append(len(rb))
1433 buffers.extend(rb)
1433 buffers.extend(rb)
1434 content = dict(status='ok', records=records, buffer_lens=buffer_lens,
1434 content = dict(status='ok', records=records, buffer_lens=buffer_lens,
1435 result_buffer_lens=result_buffer_lens)
1435 result_buffer_lens=result_buffer_lens)
1436 # self.log.debug (content)
1436 # self.log.debug (content)
1437 self.session.send(self.query, "db_reply", content=content,
1437 self.session.send(self.query, "db_reply", content=content,
1438 parent=msg, ident=client_id,
1438 parent=msg, ident=client_id,
1439 buffers=buffers)
1439 buffers=buffers)
1440
1440
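
db_query flattens every record's buffers into one list and reports per-record counts in buffer_lens / result_buffer_lens; a receiver can re-group them as in this hedged sketch (split_buffers is an illustrative helper, not part of IPython).

    def split_buffers(flat, lens):
        """Re-group a flat buffer list into per-record lists, assuming
        lens[i] counts the buffers that belong to record i."""
        out, pos = [], 0
        for n in lens:
            out.append(flat[pos:pos + n])
            pos += n
        return out

    print(split_buffers([b'a', b'b', b'c'], [2, 0, 1]))   # [[b'a', b'b'], [], [b'c']]
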
@@ -1,414 +1,414 b''
1 """A TaskRecord backend using sqlite3"""
1 """A TaskRecord backend using sqlite3"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import json
6 import json
7 import os
7 import os
8 try:
8 try:
9 import cPickle as pickle
9 import cPickle as pickle
10 except ImportError:
10 except ImportError:
11 import pickle
11 import pickle
12 from datetime import datetime
12 from datetime import datetime
13
13
14 try:
14 try:
15 import sqlite3
15 import sqlite3
16 except ImportError:
16 except ImportError:
17 sqlite3 = None
17 sqlite3 = None
18
18
19 from zmq.eventloop import ioloop
19 from zmq.eventloop import ioloop
20
20
21 from IPython.utils.traitlets import Unicode, Instance, List, Dict
21 from IPython.utils.traitlets import Unicode, Instance, List, Dict
22 from .dictdb import BaseDB
22 from .dictdb import BaseDB
23 from IPython.utils.jsonutil import date_default, extract_dates, squash_dates
23 from IPython.utils.jsonutil import date_default, extract_dates, squash_dates
24 from IPython.utils.py3compat import iteritems
24 from IPython.utils.py3compat import iteritems
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27 # SQLite operators, adapters, and converters
27 # SQLite operators, adapters, and converters
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29
29
30 try:
30 try:
31 buffer
31 buffer
32 except NameError:
32 except NameError:
33 # py3k
33 # py3k
34 buffer = memoryview
34 buffer = memoryview
35
35
36 operators = {
36 operators = {
37 '$lt' : "<",
37 '$lt' : "<",
38 '$gt' : ">",
38 '$gt' : ">",
39 # null is handled weird with ==,!=
39 # null is handled weird with ==,!=
40 '$eq' : "=",
40 '$eq' : "=",
41 '$ne' : "!=",
41 '$ne' : "!=",
42 '$lte': "<=",
42 '$lte': "<=",
43 '$gte': ">=",
43 '$gte': ">=",
44 '$in' : ('=', ' OR '),
44 '$in' : ('=', ' OR '),
45 '$nin': ('!=', ' AND '),
45 '$nin': ('!=', ' AND '),
46 # '$all': None,
46 # '$all': None,
47 # '$mod': None,
47 # '$mod': None,
48 # '$exists' : None
48 # '$exists' : None
49 }
49 }
50 null_operators = {
50 null_operators = {
51 '=' : "IS NULL",
51 '=' : "IS NULL",
52 '!=' : "IS NOT NULL",
52 '!=' : "IS NOT NULL",
53 }
53 }
54
54
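
The tuple-valued entries pair a comparison operator with a join keyword; '$in', for example, expands a list of values into repeated '=' tests joined by OR, as in this small illustration (the column name is hypothetical).

    op, join = ('=', ' OR ')                     # the '$in' entry above
    values = ['engine-a', 'engine-b']
    expr = '( %s )' % join.join(['engine_uuid %s ?' % op] * len(values))
    print(expr)                                  # ( engine_uuid = ? OR engine_uuid = ? )
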
55 def _adapt_dict(d):
55 def _adapt_dict(d):
56 return json.dumps(d, default=date_default)
56 return json.dumps(d, default=date_default)
57
57
58 def _convert_dict(ds):
58 def _convert_dict(ds):
59 if ds is None:
59 if ds is None:
60 return ds
60 return ds
61 else:
61 else:
62 if isinstance(ds, bytes):
62 if isinstance(ds, bytes):
63 # If I understand the sqlite doc correctly, this will always be utf8
63 # If I understand the sqlite doc correctly, this will always be utf8
64 ds = ds.decode('utf8')
64 ds = ds.decode('utf8')
65 return extract_dates(json.loads(ds))
65 return extract_dates(json.loads(ds))
66
66
67 def _adapt_bufs(bufs):
67 def _adapt_bufs(bufs):
68 # this is *horrible*
68 # this is *horrible*
69 # copy buffers into single list and pickle it:
69 # copy buffers into single list and pickle it:
70 if bufs and isinstance(bufs[0], (bytes, buffer)):
70 if bufs and isinstance(bufs[0], (bytes, buffer)):
71 return sqlite3.Binary(pickle.dumps(list(map(bytes, bufs)),-1))
71 return sqlite3.Binary(pickle.dumps(list(map(bytes, bufs)),-1))
72 elif bufs:
72 elif bufs:
73 return bufs
73 return bufs
74 else:
74 else:
75 return None
75 return None
76
76
77 def _convert_bufs(bs):
77 def _convert_bufs(bs):
78 if bs is None:
78 if bs is None:
79 return []
79 return []
80 else:
80 else:
81 return pickle.loads(bytes(bs))
81 return pickle.loads(bytes(bs))
82
82
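
A round-trip sketch for the buffer adapter/converter pair above; sqlite3.Binary is only a thin wrapper around the pickled bytes, so plain bytes are enough to show the round trip.

    import pickle

    bufs = [b'chunk-1', b'chunk-2']
    stored = pickle.dumps(list(map(bytes, bufs)), -1)   # what _adapt_bufs pickles
    assert pickle.loads(bytes(stored)) == bufs          # what _convert_bufs recovers
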
83 #-----------------------------------------------------------------------------
83 #-----------------------------------------------------------------------------
84 # SQLiteDB class
84 # SQLiteDB class
85 #-----------------------------------------------------------------------------
85 #-----------------------------------------------------------------------------
86
86
87 class SQLiteDB(BaseDB):
87 class SQLiteDB(BaseDB):
88 """SQLite3 TaskRecord backend."""
88 """SQLite3 TaskRecord backend."""
89
89
90 filename = Unicode('tasks.db', config=True,
90 filename = Unicode('tasks.db', config=True,
91 help="""The filename of the sqlite task database. [default: 'tasks.db']""")
91 help="""The filename of the sqlite task database. [default: 'tasks.db']""")
92 location = Unicode('', config=True,
92 location = Unicode('', config=True,
93 help="""The directory containing the sqlite task database. The default
93 help="""The directory containing the sqlite task database. The default
94 is to use the cluster_dir location.""")
94 is to use the cluster_dir location.""")
95 table = Unicode("ipython-tasks", config=True,
95 table = Unicode("ipython-tasks", config=True,
96 help="""The SQLite Table to use for storing tasks for this session. If unspecified,
96 help="""The SQLite Table to use for storing tasks for this session. If unspecified,
97 a new table will be created with the Hub's IDENT. Specifying the table will result
97 a new table will be created with the Hub's IDENT. Specifying the table will result
98 in tasks from previous sessions being available via Clients' db_query and
98 in tasks from previous sessions being available via Clients' db_query and
99 get_result methods.""")
99 get_result methods.""")
100
100
101 if sqlite3 is not None:
101 if sqlite3 is not None:
102 _db = Instance('sqlite3.Connection')
102 _db = Instance('sqlite3.Connection')
103 else:
103 else:
104 _db = None
104 _db = None
105 # the ordered list of column names
105 # the ordered list of column names
106 _keys = List(['msg_id' ,
106 _keys = List(['msg_id' ,
107 'header' ,
107 'header' ,
108 'metadata',
108 'metadata',
109 'content',
109 'content',
110 'buffers',
110 'buffers',
111 'submitted',
111 'submitted',
112 'client_uuid' ,
112 'client_uuid' ,
113 'engine_uuid' ,
113 'engine_uuid' ,
114 'started',
114 'started',
115 'completed',
115 'completed',
116 'resubmitted',
116 'resubmitted',
117 'received',
117 'received',
118 'result_header' ,
118 'result_header' ,
119 'result_metadata',
119 'result_metadata',
120 'result_content' ,
120 'result_content' ,
121 'result_buffers' ,
121 'result_buffers' ,
122 'queue' ,
122 'queue' ,
123 'execute_input' ,
123 'execute_input' ,
124 'execute_result',
124 'execute_result',
125 'pyerr',
125 'error',
126 'stdout',
126 'stdout',
127 'stderr',
127 'stderr',
128 ])
128 ])
129 # sqlite datatypes for checking that db is current format
129 # sqlite datatypes for checking that db is current format
130 _types = Dict({'msg_id' : 'text' ,
130 _types = Dict({'msg_id' : 'text' ,
131 'header' : 'dict text',
131 'header' : 'dict text',
132 'metadata' : 'dict text',
132 'metadata' : 'dict text',
133 'content' : 'dict text',
133 'content' : 'dict text',
134 'buffers' : 'bufs blob',
134 'buffers' : 'bufs blob',
135 'submitted' : 'timestamp',
135 'submitted' : 'timestamp',
136 'client_uuid' : 'text',
136 'client_uuid' : 'text',
137 'engine_uuid' : 'text',
137 'engine_uuid' : 'text',
138 'started' : 'timestamp',
138 'started' : 'timestamp',
139 'completed' : 'timestamp',
139 'completed' : 'timestamp',
140 'resubmitted' : 'text',
140 'resubmitted' : 'text',
141 'received' : 'timestamp',
141 'received' : 'timestamp',
142 'result_header' : 'dict text',
142 'result_header' : 'dict text',
143 'result_metadata' : 'dict text',
143 'result_metadata' : 'dict text',
144 'result_content' : 'dict text',
144 'result_content' : 'dict text',
145 'result_buffers' : 'bufs blob',
145 'result_buffers' : 'bufs blob',
146 'queue' : 'text',
146 'queue' : 'text',
147 'execute_input' : 'text',
147 'execute_input' : 'text',
148 'execute_result' : 'text',
148 'execute_result' : 'text',
149 'pyerr' : 'text',
149 'error' : 'text',
150 'stdout' : 'text',
150 'stdout' : 'text',
151 'stderr' : 'text',
151 'stderr' : 'text',
152 })
152 })
153
153
154 def __init__(self, **kwargs):
154 def __init__(self, **kwargs):
155 super(SQLiteDB, self).__init__(**kwargs)
155 super(SQLiteDB, self).__init__(**kwargs)
156 if sqlite3 is None:
156 if sqlite3 is None:
157 raise ImportError("SQLiteDB requires sqlite3")
157 raise ImportError("SQLiteDB requires sqlite3")
158 if not self.table:
158 if not self.table:
159 # use session, and prefix _, since starting with # is illegal
159 # use session, and prefix _, since starting with # is illegal
160 self.table = '_'+self.session.replace('-','_')
160 self.table = '_'+self.session.replace('-','_')
161 if not self.location:
161 if not self.location:
162 # get current profile
162 # get current profile
163 from IPython.core.application import BaseIPythonApplication
163 from IPython.core.application import BaseIPythonApplication
164 if BaseIPythonApplication.initialized():
164 if BaseIPythonApplication.initialized():
165 app = BaseIPythonApplication.instance()
165 app = BaseIPythonApplication.instance()
166 if app.profile_dir is not None:
166 if app.profile_dir is not None:
167 self.location = app.profile_dir.location
167 self.location = app.profile_dir.location
168 else:
168 else:
169 self.location = u'.'
169 self.location = u'.'
170 else:
170 else:
171 self.location = u'.'
171 self.location = u'.'
172 self._init_db()
172 self._init_db()
173
173
174 # register db commit as 2s periodic callback
174 # register db commit as 2s periodic callback
175 # to prevent clogging pipes
175 # to prevent clogging pipes
176 # assumes we are being run in a zmq ioloop app
176 # assumes we are being run in a zmq ioloop app
177 loop = ioloop.IOLoop.instance()
177 loop = ioloop.IOLoop.instance()
178 pc = ioloop.PeriodicCallback(self._db.commit, 2000, loop)
178 pc = ioloop.PeriodicCallback(self._db.commit, 2000, loop)
179 pc.start()
179 pc.start()
180
180
181 def _defaults(self, keys=None):
181 def _defaults(self, keys=None):
182 """create an empty record"""
182 """create an empty record"""
183 d = {}
183 d = {}
184 keys = self._keys if keys is None else keys
184 keys = self._keys if keys is None else keys
185 for key in keys:
185 for key in keys:
186 d[key] = None
186 d[key] = None
187 return d
187 return d
188
188
189 def _check_table(self):
189 def _check_table(self):
190 """Ensure that an incorrect table doesn't exist
190 """Ensure that an incorrect table doesn't exist
191
191
192 If a bad (old) table does exist, return False
192 If a bad (old) table does exist, return False
193 """
193 """
194 cursor = self._db.execute("PRAGMA table_info('%s')"%self.table)
194 cursor = self._db.execute("PRAGMA table_info('%s')"%self.table)
195 lines = cursor.fetchall()
195 lines = cursor.fetchall()
196 if not lines:
196 if not lines:
197 # table does not exist
197 # table does not exist
198 return True
198 return True
199 types = {}
199 types = {}
200 keys = []
200 keys = []
201 for line in lines:
201 for line in lines:
202 keys.append(line[1])
202 keys.append(line[1])
203 types[line[1]] = line[2]
203 types[line[1]] = line[2]
204 if self._keys != keys:
204 if self._keys != keys:
205 # key mismatch
205 # key mismatch
206 self.log.warn('keys mismatch')
206 self.log.warn('keys mismatch')
207 return False
207 return False
208 for key in self._keys:
208 for key in self._keys:
209 if types[key] != self._types[key]:
209 if types[key] != self._types[key]:
210 self.log.warn(
210 self.log.warn(
211 'type mismatch: %s: %s != %s'%(key,types[key],self._types[key])
211 'type mismatch: %s: %s != %s'%(key,types[key],self._types[key])
212 )
212 )
213 return False
213 return False
214 return True
214 return True
215
215
216 def _init_db(self):
216 def _init_db(self):
217 """Connect to the database and get new session number."""
217 """Connect to the database and get new session number."""
218 # register adapters
218 # register adapters
219 sqlite3.register_adapter(dict, _adapt_dict)
219 sqlite3.register_adapter(dict, _adapt_dict)
220 sqlite3.register_converter('dict', _convert_dict)
220 sqlite3.register_converter('dict', _convert_dict)
221 sqlite3.register_adapter(list, _adapt_bufs)
221 sqlite3.register_adapter(list, _adapt_bufs)
222 sqlite3.register_converter('bufs', _convert_bufs)
222 sqlite3.register_converter('bufs', _convert_bufs)
223 # connect to the db
223 # connect to the db
224 dbfile = os.path.join(self.location, self.filename)
224 dbfile = os.path.join(self.location, self.filename)
225 self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES,
225 self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES,
226 # isolation_level = None)#,
226 # isolation_level = None)#,
227 cached_statements=64)
227 cached_statements=64)
228 # print dir(self._db)
228 # print dir(self._db)
229 first_table = previous_table = self.table
229 first_table = previous_table = self.table
230 i=0
230 i=0
231 while not self._check_table():
231 while not self._check_table():
232 i+=1
232 i+=1
233 self.table = first_table+'_%i'%i
233 self.table = first_table+'_%i'%i
234 self.log.warn(
234 self.log.warn(
235 "Table %s exists and doesn't match db format, trying %s"%
235 "Table %s exists and doesn't match db format, trying %s"%
236 (previous_table, self.table)
236 (previous_table, self.table)
237 )
237 )
238 previous_table = self.table
238 previous_table = self.table
239
239
240 self._db.execute("""CREATE TABLE IF NOT EXISTS '%s'
240 self._db.execute("""CREATE TABLE IF NOT EXISTS '%s'
241 (msg_id text PRIMARY KEY,
241 (msg_id text PRIMARY KEY,
242 header dict text,
242 header dict text,
243 metadata dict text,
243 metadata dict text,
244 content dict text,
244 content dict text,
245 buffers bufs blob,
245 buffers bufs blob,
246 submitted timestamp,
246 submitted timestamp,
247 client_uuid text,
247 client_uuid text,
248 engine_uuid text,
248 engine_uuid text,
249 started timestamp,
249 started timestamp,
250 completed timestamp,
250 completed timestamp,
251 resubmitted text,
251 resubmitted text,
252 received timestamp,
252 received timestamp,
253 result_header dict text,
253 result_header dict text,
254 result_metadata dict text,
254 result_metadata dict text,
255 result_content dict text,
255 result_content dict text,
256 result_buffers bufs blob,
256 result_buffers bufs blob,
257 queue text,
257 queue text,
258 execute_input text,
258 execute_input text,
259 execute_result text,
259 execute_result text,
260 pyerr text,
260 error text,
261 stdout text,
261 stdout text,
262 stderr text)
262 stderr text)
263 """%self.table)
263 """%self.table)
264 self._db.commit()
264 self._db.commit()
265
265
266 def _dict_to_list(self, d):
266 def _dict_to_list(self, d):
267 """turn a mongodb-style record dict into a list."""
267 """turn a mongodb-style record dict into a list."""
268
268
269 return [ d[key] for key in self._keys ]
269 return [ d[key] for key in self._keys ]
270
270
271 def _list_to_dict(self, line, keys=None):
271 def _list_to_dict(self, line, keys=None):
272 """Inverse of dict_to_list"""
272 """Inverse of dict_to_list"""
273 keys = self._keys if keys is None else keys
273 keys = self._keys if keys is None else keys
274 d = self._defaults(keys)
274 d = self._defaults(keys)
275 for key,value in zip(keys, line):
275 for key,value in zip(keys, line):
276 d[key] = value
276 d[key] = value
277
277
278 return d
278 return d
279
279
280 def _render_expression(self, check):
280 def _render_expression(self, check):
281 """Turn a mongodb-style search dict into an SQL query."""
281 """Turn a mongodb-style search dict into an SQL query."""
282 expressions = []
282 expressions = []
283 args = []
283 args = []
284
284
285 skeys = set(check.keys())
285 skeys = set(check.keys())
286 skeys.difference_update(set(self._keys))
286 skeys.difference_update(set(self._keys))
287 skeys.difference_update(set(['buffers', 'result_buffers']))
287 skeys.difference_update(set(['buffers', 'result_buffers']))
288 if skeys:
288 if skeys:
289 raise KeyError("Illegal testing key(s): %s"%skeys)
289 raise KeyError("Illegal testing key(s): %s"%skeys)
290
290
291 for name,sub_check in iteritems(check):
291 for name,sub_check in iteritems(check):
292 if isinstance(sub_check, dict):
292 if isinstance(sub_check, dict):
293 for test,value in iteritems(sub_check):
293 for test,value in iteritems(sub_check):
294 try:
294 try:
295 op = operators[test]
295 op = operators[test]
296 except KeyError:
296 except KeyError:
297 raise KeyError("Unsupported operator: %r"%test)
297 raise KeyError("Unsupported operator: %r"%test)
298 if isinstance(op, tuple):
298 if isinstance(op, tuple):
299 op, join = op
299 op, join = op
300
300
301 if value is None and op in null_operators:
301 if value is None and op in null_operators:
302 expr = "%s %s" % (name, null_operators[op])
302 expr = "%s %s" % (name, null_operators[op])
303 else:
303 else:
304 expr = "%s %s ?"%(name, op)
304 expr = "%s %s ?"%(name, op)
305 if isinstance(value, (tuple,list)):
305 if isinstance(value, (tuple,list)):
306 if op in null_operators and any([v is None for v in value]):
306 if op in null_operators and any([v is None for v in value]):
307 # equality tests don't work with NULL
307 # equality tests don't work with NULL
308 raise ValueError("Cannot use %r test with NULL values on SQLite backend"%test)
308 raise ValueError("Cannot use %r test with NULL values on SQLite backend"%test)
309 expr = '( %s )'%( join.join([expr]*len(value)) )
309 expr = '( %s )'%( join.join([expr]*len(value)) )
310 args.extend(value)
310 args.extend(value)
311 else:
311 else:
312 args.append(value)
312 args.append(value)
313 expressions.append(expr)
313 expressions.append(expr)
314 else:
314 else:
315 # it's an equality check
315 # it's an equality check
316 if sub_check is None:
316 if sub_check is None:
317 expressions.append("%s IS NULL" % name)
317 expressions.append("%s IS NULL" % name)
318 else:
318 else:
319 expressions.append("%s = ?"%name)
319 expressions.append("%s = ?"%name)
320 args.append(sub_check)
320 args.append(sub_check)
321
321
322 expr = " AND ".join(expressions)
322 expr = " AND ".join(expressions)
323 return expr, args
323 return expr, args
324
324
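
Roughly what _render_expression produces for a typical check dict; the clause order follows dict iteration order, so this is illustrative rather than exact, and the values are placeholders.

    check = {'engine_uuid': 'abc', 'completed': {'$ne': None}}
    # _render_expression(check) would yield something like:
    expr = "engine_uuid = ? AND completed IS NOT NULL"
    args = ['abc']
    print(expr, args)
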
325 def add_record(self, msg_id, rec):
325 def add_record(self, msg_id, rec):
326 """Add a new Task Record, by msg_id."""
326 """Add a new Task Record, by msg_id."""
327 d = self._defaults()
327 d = self._defaults()
328 d.update(rec)
328 d.update(rec)
329 d['msg_id'] = msg_id
329 d['msg_id'] = msg_id
330 line = self._dict_to_list(d)
330 line = self._dict_to_list(d)
331 tups = '(%s)'%(','.join(['?']*len(line)))
331 tups = '(%s)'%(','.join(['?']*len(line)))
332 self._db.execute("INSERT INTO '%s' VALUES %s"%(self.table, tups), line)
332 self._db.execute("INSERT INTO '%s' VALUES %s"%(self.table, tups), line)
333 # self._db.commit()
333 # self._db.commit()
334
334
335 def get_record(self, msg_id):
335 def get_record(self, msg_id):
336 """Get a specific Task Record, by msg_id."""
336 """Get a specific Task Record, by msg_id."""
337 cursor = self._db.execute("""SELECT * FROM '%s' WHERE msg_id==?"""%self.table, (msg_id,))
337 cursor = self._db.execute("""SELECT * FROM '%s' WHERE msg_id==?"""%self.table, (msg_id,))
338 line = cursor.fetchone()
338 line = cursor.fetchone()
339 if line is None:
339 if line is None:
340 raise KeyError("No such msg: %r"%msg_id)
340 raise KeyError("No such msg: %r"%msg_id)
341 return self._list_to_dict(line)
341 return self._list_to_dict(line)
342
342
343 def update_record(self, msg_id, rec):
343 def update_record(self, msg_id, rec):
344 """Update the data in an existing record."""
344 """Update the data in an existing record."""
345 query = "UPDATE '%s' SET "%self.table
345 query = "UPDATE '%s' SET "%self.table
346 sets = []
346 sets = []
347 keys = sorted(rec.keys())
347 keys = sorted(rec.keys())
348 values = []
348 values = []
349 for key in keys:
349 for key in keys:
350 sets.append('%s = ?'%key)
350 sets.append('%s = ?'%key)
351 values.append(rec[key])
351 values.append(rec[key])
352 query += ', '.join(sets)
352 query += ', '.join(sets)
353 query += ' WHERE msg_id == ?'
353 query += ' WHERE msg_id == ?'
354 values.append(msg_id)
354 values.append(msg_id)
355 self._db.execute(query, values)
355 self._db.execute(query, values)
356 # self._db.commit()
356 # self._db.commit()
357
357
358 def drop_record(self, msg_id):
358 def drop_record(self, msg_id):
359 """Remove a record from the DB."""
359 """Remove a record from the DB."""
360 self._db.execute("""DELETE FROM '%s' WHERE msg_id==?"""%self.table, (msg_id,))
360 self._db.execute("""DELETE FROM '%s' WHERE msg_id==?"""%self.table, (msg_id,))
361 # self._db.commit()
361 # self._db.commit()
362
362
363 def drop_matching_records(self, check):
363 def drop_matching_records(self, check):
364 """Remove a record from the DB."""
364 """Remove a record from the DB."""
365 expr,args = self._render_expression(check)
365 expr,args = self._render_expression(check)
366 query = "DELETE FROM '%s' WHERE %s"%(self.table, expr)
366 query = "DELETE FROM '%s' WHERE %s"%(self.table, expr)
367 self._db.execute(query,args)
367 self._db.execute(query,args)
368 # self._db.commit()
368 # self._db.commit()
369
369
370 def find_records(self, check, keys=None):
370 def find_records(self, check, keys=None):
371 """Find records matching a query dict, optionally extracting subset of keys.
371 """Find records matching a query dict, optionally extracting subset of keys.
372
372
373 Returns list of matching records.
373 Returns list of matching records.
374
374
375 Parameters
375 Parameters
376 ----------
376 ----------
377
377
378 check: dict
378 check: dict
379 mongodb-style query argument
379 mongodb-style query argument
380 keys: list of strs [optional]
380 keys: list of strs [optional]
381 if specified, the subset of keys to extract. msg_id will *always* be
381 if specified, the subset of keys to extract. msg_id will *always* be
382 included.
382 included.
383 """
383 """
384 if keys:
384 if keys:
385 bad_keys = [ key for key in keys if key not in self._keys ]
385 bad_keys = [ key for key in keys if key not in self._keys ]
386 if bad_keys:
386 if bad_keys:
387 raise KeyError("Bad record key(s): %s"%bad_keys)
387 raise KeyError("Bad record key(s): %s"%bad_keys)
388
388
389 if keys:
389 if keys:
390 # ensure msg_id is present and first:
390 # ensure msg_id is present and first:
391 if 'msg_id' in keys:
391 if 'msg_id' in keys:
392 keys.remove('msg_id')
392 keys.remove('msg_id')
393 keys.insert(0, 'msg_id')
393 keys.insert(0, 'msg_id')
394 req = ', '.join(keys)
394 req = ', '.join(keys)
395 else:
395 else:
396 req = '*'
396 req = '*'
397 expr,args = self._render_expression(check)
397 expr,args = self._render_expression(check)
398 query = """SELECT %s FROM '%s' WHERE %s"""%(req, self.table, expr)
398 query = """SELECT %s FROM '%s' WHERE %s"""%(req, self.table, expr)
399 cursor = self._db.execute(query, args)
399 cursor = self._db.execute(query, args)
400 matches = cursor.fetchall()
400 matches = cursor.fetchall()
401 records = []
401 records = []
402 for line in matches:
402 for line in matches:
403 rec = self._list_to_dict(line, keys)
403 rec = self._list_to_dict(line, keys)
404 records.append(rec)
404 records.append(rec)
405 return records
405 return records
406
406
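
Usage sketch for find_records; it assumes `db` is an already-initialized SQLiteDB instance and uses hypothetical values, mirroring the mongodb-style queries the Hub issues.

    query = {'completed': {'$ne': None},        # only finished tasks
             'engine_uuid': 'hypothetical-engine-uuid'}
    for rec in db.find_records(query, keys=['msg_id', 'completed']):
        print(rec['msg_id'], rec['completed'])
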
407 def get_history(self):
407 def get_history(self):
408 """get all msg_ids, ordered by time submitted."""
408 """get all msg_ids, ordered by time submitted."""
409 query = """SELECT msg_id FROM '%s' ORDER by submitted ASC"""%self.table
409 query = """SELECT msg_id FROM '%s' ORDER by submitted ASC"""%self.table
410 cursor = self._db.execute(query)
410 cursor = self._db.execute(query)
411 # will be a list of length 1 tuples
411 # will be a list of length 1 tuples
412 return [ tup[0] for tup in cursor.fetchall()]
412 return [ tup[0] for tup in cursor.fetchall()]
413
413
414 __all__ = ['SQLiteDB'] No newline at end of file
414 __all__ = ['SQLiteDB']
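
A hedged configuration sketch for selecting this backend from ipcontroller_config.py; the db_class dotted path follows the usual IPython.parallel layout and should be checked against the installed version.

    c = get_config()
    c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'
    c.SQLiteDB.location = '/tmp'            # directory holding the database file
    c.SQLiteDB.filename = 'tasks.db'        # traits declared config=True above
    c.SQLiteDB.table = 'ipython-tasks'      # reuse one table across sessions
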
@@ -1,215 +1,215 b''
1 """Defines a KernelManager that provides signals and slots."""
1 """Defines a KernelManager that provides signals and slots."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from IPython.external.qt import QtCore
6 from IPython.external.qt import QtCore
7
7
8 from IPython.utils.traitlets import HasTraits, Type
8 from IPython.utils.traitlets import HasTraits, Type
9 from .util import MetaQObjectHasTraits, SuperQObject
9 from .util import MetaQObjectHasTraits, SuperQObject
10
10
11
11
12 class ChannelQObject(SuperQObject):
12 class ChannelQObject(SuperQObject):
13
13
14 # Emitted when the channel is started.
14 # Emitted when the channel is started.
15 started = QtCore.Signal()
15 started = QtCore.Signal()
16
16
17 # Emitted when the channel is stopped.
17 # Emitted when the channel is stopped.
18 stopped = QtCore.Signal()
18 stopped = QtCore.Signal()
19
19
20 #---------------------------------------------------------------------------
20 #---------------------------------------------------------------------------
21 # Channel interface
21 # Channel interface
22 #---------------------------------------------------------------------------
22 #---------------------------------------------------------------------------
23
23
24 def start(self):
24 def start(self):
25 """ Reimplemented to emit signal.
25 """ Reimplemented to emit signal.
26 """
26 """
27 super(ChannelQObject, self).start()
27 super(ChannelQObject, self).start()
28 self.started.emit()
28 self.started.emit()
29
29
30 def stop(self):
30 def stop(self):
31 """ Reimplemented to emit signal.
31 """ Reimplemented to emit signal.
32 """
32 """
33 super(ChannelQObject, self).stop()
33 super(ChannelQObject, self).stop()
34 self.stopped.emit()
34 self.stopped.emit()
35
35
36 #---------------------------------------------------------------------------
36 #---------------------------------------------------------------------------
37 # InProcessChannel interface
37 # InProcessChannel interface
38 #---------------------------------------------------------------------------
38 #---------------------------------------------------------------------------
39
39
40 def call_handlers_later(self, *args, **kwds):
40 def call_handlers_later(self, *args, **kwds):
41 """ Call the message handlers later.
41 """ Call the message handlers later.
42 """
42 """
43 do_later = lambda: self.call_handlers(*args, **kwds)
43 do_later = lambda: self.call_handlers(*args, **kwds)
44 QtCore.QTimer.singleShot(0, do_later)
44 QtCore.QTimer.singleShot(0, do_later)
45
45
46 def process_events(self):
46 def process_events(self):
47 """ Process any pending GUI events.
47 """ Process any pending GUI events.
48 """
48 """
49 QtCore.QCoreApplication.instance().processEvents()
49 QtCore.QCoreApplication.instance().processEvents()
50
50
51
51
52 class QtShellChannelMixin(ChannelQObject):
52 class QtShellChannelMixin(ChannelQObject):
53
53
54 # Emitted when any message is received.
54 # Emitted when any message is received.
55 message_received = QtCore.Signal(object)
55 message_received = QtCore.Signal(object)
56
56
57 # Emitted when a reply has been received for the corresponding request type.
57 # Emitted when a reply has been received for the corresponding request type.
58 execute_reply = QtCore.Signal(object)
58 execute_reply = QtCore.Signal(object)
59 complete_reply = QtCore.Signal(object)
59 complete_reply = QtCore.Signal(object)
60 object_info_reply = QtCore.Signal(object)
60 object_info_reply = QtCore.Signal(object)
61 history_reply = QtCore.Signal(object)
61 history_reply = QtCore.Signal(object)
62
62
63 #---------------------------------------------------------------------------
63 #---------------------------------------------------------------------------
64 # 'ShellChannel' interface
64 # 'ShellChannel' interface
65 #---------------------------------------------------------------------------
65 #---------------------------------------------------------------------------
66
66
67 def call_handlers(self, msg):
67 def call_handlers(self, msg):
68 """ Reimplemented to emit signals instead of making callbacks.
68 """ Reimplemented to emit signals instead of making callbacks.
69 """
69 """
70 # Emit the generic signal.
70 # Emit the generic signal.
71 self.message_received.emit(msg)
71 self.message_received.emit(msg)
72
72
73 # Emit signals for specialized message types.
73 # Emit signals for specialized message types.
74 msg_type = msg['header']['msg_type']
74 msg_type = msg['header']['msg_type']
75 signal = getattr(self, msg_type, None)
75 signal = getattr(self, msg_type, None)
76 if signal:
76 if signal:
77 signal.emit(msg)
77 signal.emit(msg)
78
78
79
79
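
Hedged usage sketch: with a Qt-enabled kernel client (a QtKernelClientMixin subclass, called `client` here as an assumption), shell replies arrive as Qt signals rather than callbacks.

    def on_execute_reply(msg):
        print(msg['content'].get('status'))     # 'ok' or 'error'

    client.shell_channel.execute_reply.connect(on_execute_reply)
    # client.shell_channel.execute('1 + 1') would then trigger the slot (assumed API)
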
80 class QtIOPubChannelMixin(ChannelQObject):
80 class QtIOPubChannelMixin(ChannelQObject):
81
81
82 # Emitted when any message is received.
82 # Emitted when any message is received.
83 message_received = QtCore.Signal(object)
83 message_received = QtCore.Signal(object)
84
84
85 # Emitted when a message of type 'stream' is received.
85 # Emitted when a message of type 'stream' is received.
86 stream_received = QtCore.Signal(object)
86 stream_received = QtCore.Signal(object)
87
87
88 # Emitted when a message of type 'execute_input' is received.
88 # Emitted when a message of type 'execute_input' is received.
89 execute_input_received = QtCore.Signal(object)
89 execute_input_received = QtCore.Signal(object)
90
90
91 # Emitted when a message of type 'execute_result' is received.
91 # Emitted when a message of type 'execute_result' is received.
92 execute_result_received = QtCore.Signal(object)
92 execute_result_received = QtCore.Signal(object)
93
93
94 # Emitted when a message of type 'pyerr' is received.
94 # Emitted when a message of type 'error' is received.
95 pyerr_received = QtCore.Signal(object)
95 error_received = QtCore.Signal(object)
96
96
97 # Emitted when a message of type 'display_data' is received
97 # Emitted when a message of type 'display_data' is received
98 display_data_received = QtCore.Signal(object)
98 display_data_received = QtCore.Signal(object)
99
99
100 # Emitted when a crash report message is received from the kernel's
100 # Emitted when a crash report message is received from the kernel's
101 # last-resort sys.excepthook.
101 # last-resort sys.excepthook.
102 crash_received = QtCore.Signal(object)
102 crash_received = QtCore.Signal(object)
103
103
104 # Emitted when a shutdown is noticed.
104 # Emitted when a shutdown is noticed.
105 shutdown_reply_received = QtCore.Signal(object)
105 shutdown_reply_received = QtCore.Signal(object)
106
106
107 #---------------------------------------------------------------------------
107 #---------------------------------------------------------------------------
108 # 'IOPubChannel' interface
108 # 'IOPubChannel' interface
109 #---------------------------------------------------------------------------
109 #---------------------------------------------------------------------------
110
110
111 def call_handlers(self, msg):
111 def call_handlers(self, msg):
112 """ Reimplemented to emit signals instead of making callbacks.
112 """ Reimplemented to emit signals instead of making callbacks.
113 """
113 """
114 # Emit the generic signal.
114 # Emit the generic signal.
115 self.message_received.emit(msg)
115 self.message_received.emit(msg)
116 # Emit signals for specialized message types.
116 # Emit signals for specialized message types.
117 msg_type = msg['header']['msg_type']
117 msg_type = msg['header']['msg_type']
118 signal = getattr(self, msg_type + '_received', None)
118 signal = getattr(self, msg_type + '_received', None)
119 if signal:
119 if signal:
120 signal.emit(msg)
120 signal.emit(msg)
121 elif msg_type in ('stdout', 'stderr'):
121 elif msg_type in ('stdout', 'stderr'):
122 self.stream_received.emit(msg)
122 self.stream_received.emit(msg)
123
123
124 def flush(self):
124 def flush(self):
125 """ Reimplemented to ensure that signals are dispatched immediately.
125 """ Reimplemented to ensure that signals are dispatched immediately.
126 """
126 """
127 super(QtIOPubChannelMixin, self).flush()
127 super(QtIOPubChannelMixin, self).flush()
128 QtCore.QCoreApplication.instance().processEvents()
128 QtCore.QCoreApplication.instance().processEvents()
129
129
130
130
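
A minimal illustration of the iopub dispatch rule implemented in call_handlers above: the msg_type is suffixed with '_received' and looked up as a signal attribute, so an 'error' message lands on error_received. The message payload is made up.

    msg = {'header': {'msg_type': 'error'},
           'content': {'ename': 'ValueError'}}      # made-up payload
    signal_name = msg['header']['msg_type'] + '_received'
    print(signal_name)                              # error_received
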
131 class QtStdInChannelMixin(ChannelQObject):
131 class QtStdInChannelMixin(ChannelQObject):
132
132
133 # Emitted when any message is received.
133 # Emitted when any message is received.
134 message_received = QtCore.Signal(object)
134 message_received = QtCore.Signal(object)
135
135
136 # Emitted when an input request is received.
136 # Emitted when an input request is received.
137 input_requested = QtCore.Signal(object)
137 input_requested = QtCore.Signal(object)
138
138
139 #---------------------------------------------------------------------------
139 #---------------------------------------------------------------------------
140 # 'StdInChannel' interface
140 # 'StdInChannel' interface
141 #---------------------------------------------------------------------------
141 #---------------------------------------------------------------------------
142
142
143 def call_handlers(self, msg):
143 def call_handlers(self, msg):
144 """ Reimplemented to emit signals instead of making callbacks.
144 """ Reimplemented to emit signals instead of making callbacks.
145 """
145 """
146 # Emit the generic signal.
146 # Emit the generic signal.
147 self.message_received.emit(msg)
147 self.message_received.emit(msg)
148
148
149 # Emit signals for specialized message types.
149 # Emit signals for specialized message types.
150 msg_type = msg['header']['msg_type']
150 msg_type = msg['header']['msg_type']
151 if msg_type == 'input_request':
151 if msg_type == 'input_request':
152 self.input_requested.emit(msg)
152 self.input_requested.emit(msg)
153
153
154
154
155 class QtHBChannelMixin(ChannelQObject):
155 class QtHBChannelMixin(ChannelQObject):
156
156
157 # Emitted when the kernel has died.
157 # Emitted when the kernel has died.
158 kernel_died = QtCore.Signal(object)
158 kernel_died = QtCore.Signal(object)
159
159
160 #---------------------------------------------------------------------------
160 #---------------------------------------------------------------------------
161 # 'HBChannel' interface
161 # 'HBChannel' interface
162 #---------------------------------------------------------------------------
162 #---------------------------------------------------------------------------
163
163
164 def call_handlers(self, since_last_heartbeat):
164 def call_handlers(self, since_last_heartbeat):
165 """ Reimplemented to emit signals instead of making callbacks.
165 """ Reimplemented to emit signals instead of making callbacks.
166 """
166 """
167 # Emit the generic signal.
167 # Emit the generic signal.
168 self.kernel_died.emit(since_last_heartbeat)
168 self.kernel_died.emit(since_last_heartbeat)
169
169
170
170
171 class QtKernelRestarterMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
171 class QtKernelRestarterMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
172
172
173 _timer = None
173 _timer = None
174
174
175
175
176 class QtKernelManagerMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
176 class QtKernelManagerMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
177 """ A KernelClient that provides signals and slots.
177 """ A KernelClient that provides signals and slots.
178 """
178 """
179
179
180 kernel_restarted = QtCore.Signal()
180 kernel_restarted = QtCore.Signal()
181
181
182
182
183 class QtKernelClientMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
183 class QtKernelClientMixin(MetaQObjectHasTraits('NewBase', (HasTraits, SuperQObject), {})):
184 """ A KernelClient that provides signals and slots.
184 """ A KernelClient that provides signals and slots.
185 """
185 """
186
186
187 # Emitted when the kernel client has started listening.
187 # Emitted when the kernel client has started listening.
188 started_channels = QtCore.Signal()
188 started_channels = QtCore.Signal()
189
189
190 # Emitted when the kernel client has stopped listening.
190 # Emitted when the kernel client has stopped listening.
191 stopped_channels = QtCore.Signal()
191 stopped_channels = QtCore.Signal()
192
192
193 # Use Qt-specific channel classes that emit signals.
193 # Use Qt-specific channel classes that emit signals.
194 iopub_channel_class = Type(QtIOPubChannelMixin)
194 iopub_channel_class = Type(QtIOPubChannelMixin)
195 shell_channel_class = Type(QtShellChannelMixin)
195 shell_channel_class = Type(QtShellChannelMixin)
196 stdin_channel_class = Type(QtStdInChannelMixin)
196 stdin_channel_class = Type(QtStdInChannelMixin)
197 hb_channel_class = Type(QtHBChannelMixin)
197 hb_channel_class = Type(QtHBChannelMixin)
198
198
199 #---------------------------------------------------------------------------
199 #---------------------------------------------------------------------------
200 # 'KernelClient' interface
200 # 'KernelClient' interface
201 #---------------------------------------------------------------------------
201 #---------------------------------------------------------------------------
202
202
203 #------ Channel management -------------------------------------------------
203 #------ Channel management -------------------------------------------------
204
204
205 def start_channels(self, *args, **kw):
205 def start_channels(self, *args, **kw):
206 """ Reimplemented to emit signal.
206 """ Reimplemented to emit signal.
207 """
207 """
208 super(QtKernelClientMixin, self).start_channels(*args, **kw)
208 super(QtKernelClientMixin, self).start_channels(*args, **kw)
209 self.started_channels.emit()
209 self.started_channels.emit()
210
210
211 def stop_channels(self):
211 def stop_channels(self):
212 """ Reimplemented to emit signal.
212 """ Reimplemented to emit signal.
213 """
213 """
214 super(QtKernelClientMixin, self).stop_channels()
214 super(QtKernelClientMixin, self).stop_channels()
215 self.stopped_channels.emit()
215 self.stopped_channels.emit()