@@ -1,76 +1,77 @@
 """
 Base front end class for all async frontends.
 """
 __docformat__ = "restructuredtext en"

 #-------------------------------------------------------------------------------
 # Copyright (C) 2008 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-------------------------------------------------------------------------------


 #-------------------------------------------------------------------------------
 # Imports
 #-------------------------------------------------------------------------------
-from IPython.external import guid

+from IPython.external import guid

 from zope.interface import Interface, Attribute, implements, classProvides
 from twisted.python.failure import Failure
-from IPython.frontend.frontendbase import FrontEndBase, IFrontEnd, IFrontEndFactory
+from IPython.frontend.frontendbase import (
+    FrontEndBase, IFrontEnd, IFrontEndFactory)
 from IPython.kernel.core.history import FrontEndHistory
 from IPython.kernel.engineservice import IEngineCore


 class AsyncFrontEndBase(FrontEndBase):
     """
     Overrides FrontEndBase to wrap execute in a deferred result.
     All callbacks are made as callbacks on the deferred result.
     """

     implements(IFrontEnd)
     classProvides(IFrontEndFactory)

     def __init__(self, engine=None, history=None):
         assert(engine==None or IEngineCore.providedBy(engine))
         self.engine = IEngineCore(engine)
         if history is None:
             self.history = FrontEndHistory(input_cache=[''])
         else:
             self.history = history


     def execute(self, block, blockID=None):
         """Execute the block and return the deferred result.

         Parameters:
             block : {str, AST}
             blockID : any
                 Caller may provide an ID to identify this block.
                 result['blockID'] := blockID

         Result:
             Deferred result of self.interpreter.execute
         """

         if(not self.is_complete(block)):
             return Failure(Exception("Block is not compilable"))

         if(blockID == None):
             blockID = guid.generate()

         d = self.engine.execute(block)
         d.addCallback(self._add_history, block=block)
         d.addCallbacks(self._add_block_id_for_result,
                 errback=self._add_block_id_for_failure,
                 callbackArgs=(blockID,),
                 errbackArgs=(blockID,))
         d.addBoth(self.update_cell_prompt, blockID=blockID)
         d.addCallbacks(self.render_result,
                 errback=self.render_error)

         return d

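Note on the hunk above: execute() hands back the engine's Twisted Deferred with the history, block-ID, prompt-update and rendering callbacks already chained on it. A minimal usage sketch, not part of this diff; the `frontend` object and the handler names are illustrative assumptions:

    def on_result(result):
        # By the time this fires, _add_history has run, result['blockID'] is set,
        # update_cell_prompt has been called and render_result has seen the result.
        print result['blockID']
        return result

    def on_failure(failure):
        # render_error has already handled this Failure; failure.blockID names the block.
        print failure.blockID
        return failure

    # frontend is assumed to be an AsyncFrontEndBase subclass wired to an engine.
    d = frontend.execute("a = 10 + 10", blockID="example-block")
    d.addCallbacks(on_result, on_failure)
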
@@ -1,94 +1,100 @@
 # encoding: utf-8
 """This file contains unittests for the
 IPython.frontend.cocoa.cocoa_frontend module.
 """
 __docformat__ = "restructuredtext en"

 #---------------------------------------------------------------------------
 # Copyright (C) 2005 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #---------------------------------------------------------------------------

 #---------------------------------------------------------------------------
 # Imports
 #---------------------------------------------------------------------------

+# Tell nose to skip this module
+__test__ = {}
+
+from twisted.trial import unittest
+from twisted.internet.defer import succeed
+
+from IPython.kernel.core.interpreter import Interpreter
+import IPython.kernel.engineservice as es
+
 try:
-    from IPython.kernel.core.interpreter import Interpreter
-    import IPython.kernel.engineservice as es
-    from IPython.testing.util import DeferredTestCase
-    from twisted.internet.defer import succeed
     from IPython.frontend.cocoa.cocoa_frontend import IPythonCocoaController
     from Foundation import NSMakeRect
     from AppKit import NSTextView, NSScrollView
 except ImportError:
-    import nose
-    raise nose.SkipTest("This test requires zope.interface, Twisted, Foolscap and PyObjC")
+    # This tells twisted.trial to skip this module if PyObjC is not found
+    skip = True

-class TestIPythonCocoaControler(DeferredTestCase):
+#---------------------------------------------------------------------------
+# Tests
+#---------------------------------------------------------------------------
+class TestIPythonCocoaControler(unittest.TestCase):
     """Tests for IPythonCocoaController"""

     def setUp(self):
         self.controller = IPythonCocoaController.alloc().init()
         self.engine = es.EngineService()
         self.engine.startService()

     def tearDown(self):
         self.controller = None
         self.engine.stopService()

     def testControllerExecutesCode(self):
         code ="""5+5"""
         expected = Interpreter().execute(code)
         del expected['number']
         def removeNumberAndID(result):
             del result['number']
             del result['id']
             return result
-        self.assertDeferredEquals(
-            self.controller.execute(code).addCallback(removeNumberAndID),
-            expected)
+        d = self.controller.execute(code)
+        d.addCallback(removeNumberAndID)
+        d.addCallback(lambda r: self.assertEquals(r, expected))

     def testControllerMirrorsUserNSWithValuesAsStrings(self):
         code = """userns1=1;userns2=2"""
         def testControllerUserNS(result):
             self.assertEquals(self.controller.userNS['userns1'], 1)
             self.assertEquals(self.controller.userNS['userns2'], 2)
-
         self.controller.execute(code).addCallback(testControllerUserNS)

-
     def testControllerInstantiatesIEngine(self):
         self.assert_(es.IEngineBase.providedBy(self.controller.engine))

     def testControllerCompletesToken(self):
         code = """longNameVariable=10"""
         def testCompletes(result):
             self.assert_("longNameVariable" in result)

         def testCompleteToken(result):
             self.controller.complete("longNa").addCallback(testCompletes)

         self.controller.execute(code).addCallback(testCompletes)


     def testCurrentIndent(self):
         """test that current_indent_string returns current indent or None.
         Uses _indent_for_block for direct unit testing.
         """

         self.controller.tabUsesSpaces = True
         self.assert_(self.controller._indent_for_block("""a=3""") == None)
         self.assert_(self.controller._indent_for_block("") == None)
         block = """def test():\n a=3"""
         self.assert_(self.controller._indent_for_block(block) == \
                     ' ' * self.controller.tabSpaces)

         block = """if(True):\n%sif(False):\n%spass""" % \
                     (' '*self.controller.tabSpaces,
                      2*' '*self.controller.tabSpaces)
         self.assert_(self.controller._indent_for_block(block) == \
                     2*(' '*self.controller.tabSpaces))

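Note on the hunk above: the module now opts out of nose collection with a falsy `__test__` and, when PyObjC is missing, relies on twisted.trial's module-level `skip` attribute instead of raising nose.SkipTest from the import handler. A hedged sketch of the same pattern for any trial-run module with an optional dependency (`some_optional_module` is an illustrative name):

    # Tell nose to skip this module
    __test__ = {}

    try:
        import some_optional_module        # illustrative optional dependency
    except ImportError:
        # As in the diff above: twisted.trial sees the module-level attribute
        # and reports the module as skipped instead of erroring on import.
        skip = True
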
@@ -1,111 +1,109 @@
 # encoding: utf-8

 """This file contains unittests for the asyncfrontendbase module."""

 __docformat__ = "restructuredtext en"

 #---------------------------------------------------------------------------
 # Copyright (C) 2008 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #---------------------------------------------------------------------------

 #---------------------------------------------------------------------------
 # Imports
 #---------------------------------------------------------------------------

+# Tell nose to skip this module
+__test__ = {}

-try:
-    from twisted.trial import unittest
-    from IPython.frontend.asyncfrontendbase import AsyncFrontEndBase
-    from IPython.frontend import frontendbase
-    from IPython.kernel.engineservice import EngineService
-    from IPython.testing.parametric import Parametric, parametric
-except ImportError:
-    import nose
-    raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
+from twisted.trial import unittest
+from IPython.frontend.asyncfrontendbase import AsyncFrontEndBase
+from IPython.frontend import frontendbase
+from IPython.kernel.engineservice import EngineService
+from IPython.testing.parametric import Parametric, parametric


 class FrontEndCallbackChecker(AsyncFrontEndBase):
     """FrontEndBase subclass for checking callbacks"""
     def __init__(self, engine=None, history=None):
         super(FrontEndCallbackChecker, self).__init__(engine=engine,
                                                       history=history)
         self.updateCalled = False
         self.renderResultCalled = False
         self.renderErrorCalled = False

     def update_cell_prompt(self, result, blockID=None):
         self.updateCalled = True
         return result

     def render_result(self, result):
         self.renderResultCalled = True
         return result

     def render_error(self, failure):
         self.renderErrorCalled = True
         return failure


 class TestAsyncFrontendBase(unittest.TestCase):
     def setUp(self):
         """Setup the EngineService and FrontEndBase"""

         self.fb = FrontEndCallbackChecker(engine=EngineService())

     def test_implements_IFrontEnd(self):
         self.assert_(frontendbase.IFrontEnd.implementedBy(
                                     AsyncFrontEndBase))

     def test_is_complete_returns_False_for_incomplete_block(self):
         block = """def test(a):"""
         self.assert_(self.fb.is_complete(block) == False)

     def test_is_complete_returns_True_for_complete_block(self):
         block = """def test(a): pass"""
         self.assert_(self.fb.is_complete(block))
         block = """a=3"""
         self.assert_(self.fb.is_complete(block))

     def test_blockID_added_to_result(self):
         block = """3+3"""
         d = self.fb.execute(block, blockID='TEST_ID')
         d.addCallback(lambda r: self.assert_(r['blockID']=='TEST_ID'))
         return d

     def test_blockID_added_to_failure(self):
         block = "raise Exception()"
         d = self.fb.execute(block,blockID='TEST_ID')
         d.addErrback(lambda f: self.assert_(f.blockID=='TEST_ID'))
         return d

     def test_callbacks_added_to_execute(self):
         d = self.fb.execute("10+10")
         d.addCallback(lambda r: self.assert_(self.fb.updateCalled and self.fb.renderResultCalled))
         return d

     def test_error_callback_added_to_execute(self):
         """Test that render_error called on execution error."""

         d = self.fb.execute("raise Exception()")
         d.addErrback(lambda f: self.assert_(self.fb.renderErrorCalled))
         return d

     def test_history_returns_expected_block(self):
         """Make sure history browsing doesn't fail."""

         blocks = ["a=1","a=2","a=3"]
         d = self.fb.execute(blocks[0])
         d.addCallback(lambda _: self.fb.execute(blocks[1]))
         d.addCallback(lambda _: self.fb.execute(blocks[2]))
         d.addCallback(lambda _: self.assert_(self.fb.get_history_previous("")==blocks[-2]))
         d.addCallback(lambda _: self.assert_(self.fb.get_history_previous("")==blocks[-3]))
         d.addCallback(lambda _: self.assert_(self.fb.get_history_next()==blocks[-2]))
         return d

     def test_history_returns_none_at_startup(self):
         self.assert_(self.fb.get_history_previous("")==None)
         self.assert_(self.fb.get_history_next()==None)

@@ -1,67 +1,67 @@
 # encoding: utf-8
 """
 Test process execution and IO redirection.
 """

 __docformat__ = "restructuredtext en"

 #-----------------------------------------------------------------------------
-# Copyright (C) 2008 The IPython Development Team
+# Copyright (C) 2008-2009 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is
 # in the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 from cStringIO import StringIO
 from time import sleep
 import sys

 from IPython.frontend.process import PipedProcess
 from IPython.testing import decorators as testdec


 def test_capture_out():
     """ A simple test to see if we can execute a process and get the output.
     """
     s = StringIO()
     p = PipedProcess('echo 1', out_callback=s.write, )
     p.start()
     p.join()
     result = s.getvalue().rstrip()
     assert result == '1'


 def test_io():
     """ Checks that we can send characters on stdin to the process.
     """
     s = StringIO()
     p = PipedProcess(sys.executable + ' -c "a = raw_input(); print a"',
                      out_callback=s.write, )
     p.start()
     test_string = '12345\n'
     while not hasattr(p, 'process'):
         sleep(0.1)
     p.process.stdin.write(test_string)
     p.join()
     result = s.getvalue()
     assert result == test_string


 def test_kill():
     """ Check that we can kill a process, and its subprocess.
     """
     s = StringIO()
     p = PipedProcess(sys.executable + ' -c "a = raw_input();"',
                      out_callback=s.write, )
     p.start()
     while not hasattr(p, 'process'):
         sleep(0.1)
     p.process.kill()
     assert p.process.poll() is not None


 if __name__ == '__main__':
     test_capture_out()
     test_io()
     test_kill()

@@ -1,61 +1,64 @@
 # encoding: utf-8

 """This file contains unittests for the interpreter.py module."""

 __docformat__ = "restructuredtext en"

 #-----------------------------------------------------------------------------
 # Copyright (C) 2008-2009 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

 import unittest
 from IPython.kernel.core.interpreter import Interpreter

 #-----------------------------------------------------------------------------
 # Tests
 #-----------------------------------------------------------------------------

+# Tell nose to skip this module
+__test__ = {}
+
 class TestInterpreter(unittest.TestCase):

     def test_unicode(self):
         """ Test unicode handling with the interpreter."""
         i = Interpreter()
         i.execute_python(u'print "ù"')
         i.execute_python('print "ù"')

     def test_ticket266993(self):
         """ Test for ticket 266993."""
         i = Interpreter()
         i.execute('str("""a\nb""")')

     def test_ticket364347(self):
         """Test for ticket 364347."""
         i = Interpreter()
         i.split_commands('str("a\\nb")')

     def test_split_commands(self):
         """ Test that commands are indeed individually split."""
         i = Interpreter()
         test_atoms = [('(1\n + 1)', ),
                       ('1', '1', ),
                       ]
         for atoms in test_atoms:
             atoms = [atom.rstrip() + '\n' for atom in atoms]
             self.assertEquals(i.split_commands(''.join(atoms)),atoms)

     def test_long_lines(self):
         """ Test for spurious syntax error created by the interpreter."""
         test_strings = [u'( 1 +\n 1\n )\n\n',
                         u'(1 \n + 1\n )\n\n',
                         ]
         i = Interpreter()
         for s in test_strings:
             i.execute(s)

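Note on the hunk above: test_split_commands encodes the contract that split_commands() returns one newline-terminated string per complete command. Restated outside the test harness as an illustrative sketch, using the same inputs the test builds:

    from IPython.kernel.core.interpreter import Interpreter

    i = Interpreter()
    print i.split_commands('1\n1\n')         # the test expects ['1\n', '1\n']
    print i.split_commands('(1\n + 1)\n')    # the test expects ['(1\n + 1)\n']
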
@@ -1,171 +1,174 @@
 # encoding: utf-8

 """This file contains unittests for the notification.py module."""

 __docformat__ = "restructuredtext en"

 #-----------------------------------------------------------------------------
 # Copyright (C) 2008 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------

 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------

+# Tell nose to skip this module
+__test__ = {}
+
 import unittest
 import IPython.kernel.core.notification as notification
 from nose.tools import timed

 #
 # Supporting test classes
 #

 class Observer(object):
     """docstring for Observer"""
     def __init__(self, expectedType, expectedSender,
                  center=notification.sharedCenter, **kwargs):
         super(Observer, self).__init__()
         self.expectedType = expectedType
         self.expectedSender = expectedSender
         self.expectedKwArgs = kwargs
         self.recieved = False
         center.add_observer(self.callback,
                             self.expectedType,
                             self.expectedSender)


     def callback(self, theType, sender, args={}):
         """callback"""

         assert(theType == self.expectedType or
                self.expectedType == None)
         assert(sender == self.expectedSender or
                self.expectedSender == None)
         assert(args == self.expectedKwArgs)
         self.recieved = True


     def verify(self):
         """verify"""

         assert(self.recieved)

     def reset(self):
         """reset"""

         self.recieved = False



 class Notifier(object):
     """docstring for Notifier"""
     def __init__(self, theType, **kwargs):
         super(Notifier, self).__init__()
         self.theType = theType
         self.kwargs = kwargs

     def post(self, center=notification.sharedCenter):
         """fire"""

         center.post_notification(self.theType, self,
                                  **self.kwargs)


 #
 # Test Cases
 #

 class NotificationTests(unittest.TestCase):
     """docstring for NotificationTests"""

     def tearDown(self):
         notification.sharedCenter.remove_all_observers()

     def test_notification_delivered(self):
         """Test that notifications are delivered"""
         expectedType = 'EXPECTED_TYPE'
         sender = Notifier(expectedType)
         observer = Observer(expectedType, sender)

         sender.post()

         observer.verify()


     def test_type_specificity(self):
         """Test that observers are registered by type"""

         expectedType = 1
         unexpectedType = "UNEXPECTED_TYPE"
         sender = Notifier(expectedType)
         unexpectedSender = Notifier(unexpectedType)
         observer = Observer(expectedType, sender)

         sender.post()
         unexpectedSender.post()

         observer.verify()


     def test_sender_specificity(self):
         """Test that observers are registered by sender"""

         expectedType = "EXPECTED_TYPE"
         sender1 = Notifier(expectedType)
         sender2 = Notifier(expectedType)
         observer = Observer(expectedType, sender1)

         sender1.post()
         sender2.post()

         observer.verify()


     def test_remove_all_observers(self):
         """White-box test for remove_all_observers"""

         for i in xrange(10):
             Observer('TYPE', None, center=notification.sharedCenter)

         self.assert_(len(notification.sharedCenter.observers[('TYPE',None)]) >= 10,
                      "observers registered")

         notification.sharedCenter.remove_all_observers()

         self.assert_(len(notification.sharedCenter.observers) == 0, "observers removed")


     def test_any_sender(self):
         """test_any_sender"""

         expectedType = "EXPECTED_TYPE"
         sender1 = Notifier(expectedType)
         sender2 = Notifier(expectedType)
         observer = Observer(expectedType, None)


         sender1.post()
         observer.verify()

         observer.reset()
         sender2.post()
         observer.verify()


     @timed(.01)
     def test_post_performance(self):
         """Test that post_notification, even with many registered irrelevant
         observers is fast"""

         for i in xrange(10):
             Observer("UNRELATED_TYPE", None)

         o = Observer('EXPECTED_TYPE', None)

         notification.sharedCenter.post_notification('EXPECTED_TYPE', self)

         o.verify()

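Note on the hunk above: the Observer/Notifier helpers wrap a small notification-center API. A minimal sketch of the direct calls the tests rely on ('EXAMPLE_TYPE' and the handler are illustrative; delivery is synchronous, which is why the tests can call verify() immediately after post()):

    import IPython.kernel.core.notification as notification

    def handler(theType, sender, args={}):
        # Receives the keyword arguments given to post_notification as a dict.
        print theType, sender, args

    # A sender of None means "any sender", mirroring test_any_sender above.
    notification.sharedCenter.add_observer(handler, 'EXAMPLE_TYPE', None)
    notification.sharedCenter.post_notification('EXAMPLE_TYPE', object(), extra=1)
    notification.sharedCenter.remove_all_observers()
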
@@ -1,70 +1,70 @@
 # encoding: utf-8
 """
 Test the output capture at the OS level, using file descriptors.
 """

 __docformat__ = "restructuredtext en"

 #-------------------------------------------------------------------------------
 # Copyright (C) 2008 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is
 # in the file COPYING, distributed as part of this software.
 #-------------------------------------------------------------------------------

+# Tell nose to skip this module
+__test__ = {}

-# Stdlib imports
 import os
 from cStringIO import StringIO

-# Our own imports
 from IPython.testing import decorators as dec

 #-----------------------------------------------------------------------------
 # Test functions

 @dec.skip_win32
 def test_redirector():
     """ Checks that the redirector can be used to do synchronous capture.
     """
     from IPython.kernel.core.fd_redirector import FDRedirector
     r = FDRedirector()
     out = StringIO()
     try:
         r.start()
         for i in range(10):
             os.system('echo %ic' % i)
             print >>out, r.getvalue(),
             print >>out, i
     except:
         r.stop()
         raise
     r.stop()
     result1 = out.getvalue()
     result2 = "".join("%ic\n%i\n" %(i, i) for i in range(10))
     assert result1 == result2


 @dec.skip_win32
 def test_redirector_output_trap():
     """ This test check not only that the redirector_output_trap does
     trap the output, but also that it does it in a gready way, that
     is by calling the callback ASAP.
     """
     from IPython.kernel.core.redirector_output_trap import RedirectorOutputTrap
     out = StringIO()
     trap = RedirectorOutputTrap(out.write, out.write)
     try:
         trap.set()
         for i in range(10):
             os.system('echo %ic' % i)
             print "%ip" % i
             print >>out, i
     except:
         trap.unset()
         raise
     trap.unset()
     result1 = out.getvalue()
     result2 = "".join("%ic\n%ip\n%i\n" %(i, i, i) for i in range(10))
     assert result1 == result2

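Note on the hunk above: the first test drives FDRedirector through its whole lifecycle; reduced to its core it is a start/getvalue/stop capture of file-descriptor-level output. Sketch only; the echoed text is illustrative:

    import os
    from IPython.kernel.core.fd_redirector import FDRedirector

    r = FDRedirector()
    r.start()
    os.system('echo captured at the fd level')
    text = r.getvalue()   # output written to the real stdout fd since start()
    r.stop()
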
@@ -1,1064 +1,1066 @@
 # -*- coding: utf-8 -*-
 """
 ultraTB.py -- Spice up your tracebacks!

 * ColorTB
 I've always found it a bit hard to visually parse tracebacks in Python. The
 ColorTB class is a solution to that problem. It colors the different parts of a
 traceback in a manner similar to what you would expect from a syntax-highlighting
 text editor.

 Installation instructions for ColorTB:
     import sys,ultraTB
     sys.excepthook = ultraTB.ColorTB()

 * VerboseTB
 I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
 of useful info when a traceback occurs. Ping originally had it spit out HTML
 and intended it for CGI programmers, but why should they have all the fun? I
 altered it to spit out colored text to the terminal. It's a bit overwhelming,
 but kind of neat, and maybe useful for long-running programs that you believe
 are bug-free. If a crash *does* occur in that type of program you want details.
 Give it a shot--you'll love it or you'll hate it.

 Note:

   The Verbose mode prints the variables currently visible where the exception
   happened (shortening their strings if too long). This can potentially be
   very slow, if you happen to have a huge data structure whose string
   representation is complex to compute. Your computer may appear to freeze for
   a while with cpu usage at 100%. If this occurs, you can cancel the traceback
   with Ctrl-C (maybe hitting it more than once).

   If you encounter this kind of situation often, you may want to use the
   Verbose_novars mode instead of the regular Verbose, which avoids formatting
   variables (but otherwise includes the information and context given by
   Verbose).


 Installation instructions for ColorTB:
     import sys,ultraTB
     sys.excepthook = ultraTB.VerboseTB()

 Note: Much of the code in this module was lifted verbatim from the standard
 library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.

 * Color schemes
 The colors are defined in the class TBTools through the use of the
 ColorSchemeTable class. Currently the following exist:

   - NoColor: allows all of this module to be used in any terminal (the color
     escapes are just dummy blank strings).

   - Linux: is meant to look good in a terminal like the Linux console (black
     or very dark background).

   - LightBG: similar to Linux but swaps dark/light colors to be more readable
     in light background terminals.

 You can implement other color schemes easily, the syntax is fairly
 self-explanatory. Please send back new schemes you develop to the author for
 possible inclusion in future releases.
 """

 #*****************************************************************************
 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #*****************************************************************************

 # Required modules
 import inspect
 import keyword
 import linecache
 import os
 import pydoc
 import re
 import string
 import sys
 import time
 import tokenize
 import traceback
 import types

 # For purposes of monkeypatching inspect to fix a bug in it.
 from inspect import getsourcefile, getfile, getmodule,\
      ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode


 # IPython's own modules
 # Modified pdb which doesn't damage IPython's readline handling
 from IPython import Debugger, PyColorize
 from IPython.ipstruct import Struct
 from IPython.excolors import exception_colors
 from IPython.genutils import Term,uniq_stable,error,info

 # Globals
 # amount of space to put line numbers before verbose tracebacks
 INDENT_SIZE = 8

 # Default color scheme.  This is used, for example, by the traceback
 # formatter.  When running in an actual IPython instance, the user's rc.colors
 # value is used, but havinga module global makes this functionality available
 # to users of ultraTB who are NOT running inside ipython.
 DEFAULT_SCHEME = 'NoColor'

 #---------------------------------------------------------------------------
 # Code begins

 # Utility functions
 def inspect_error():
     """Print a message about internal inspect errors.

     These are unfortunately quite common."""

     error('Internal Python error in the inspect module.\n'
           'Below is the traceback from this internal error.\n')


 def findsource(object):
     """Return the entire source file and starting line number for an object.

     The argument may be a module, class, method, function, traceback, frame,
     or code object.  The source code is returned as a list of all the lines
     in the file and the line number indexes a line in that list.  An IOError
     is raised if the source code cannot be retrieved.

     FIXED version with which we monkeypatch the stdlib to work around a bug."""

     file = getsourcefile(object) or getfile(object)
     # If the object is a frame, then trying to get the globals dict from its
     # module won't work. Instead, the frame object itself has the globals
     # dictionary.
     globals_dict = None
     if inspect.isframe(object):
         # XXX: can this ever be false?
         globals_dict = object.f_globals
     else:
         module = getmodule(object, file)
         if module:
             globals_dict = module.__dict__
     lines = linecache.getlines(file, globals_dict)
     if not lines:
         raise IOError('could not get source code')

     if ismodule(object):
         return lines, 0

     if isclass(object):
         name = object.__name__
|
151 | name = object.__name__ | |
152 | pat = re.compile(r'^(\s*)class\s*' + name + r'\b') |
|
152 | pat = re.compile(r'^(\s*)class\s*' + name + r'\b') | |
153 | # make some effort to find the best matching class definition: |
|
153 | # make some effort to find the best matching class definition: | |
154 | # use the one with the least indentation, which is the one |
|
154 | # use the one with the least indentation, which is the one | |
155 | # that's most probably not inside a function definition. |
|
155 | # that's most probably not inside a function definition. | |
156 | candidates = [] |
|
156 | candidates = [] | |
157 | for i in range(len(lines)): |
|
157 | for i in range(len(lines)): | |
158 | match = pat.match(lines[i]) |
|
158 | match = pat.match(lines[i]) | |
159 | if match: |
|
159 | if match: | |
160 | # if it's at toplevel, it's already the best one |
|
160 | # if it's at toplevel, it's already the best one | |
161 | if lines[i][0] == 'c': |
|
161 | if lines[i][0] == 'c': | |
162 | return lines, i |
|
162 | return lines, i | |
163 | # else add whitespace to candidate list |
|
163 | # else add whitespace to candidate list | |
164 | candidates.append((match.group(1), i)) |
|
164 | candidates.append((match.group(1), i)) | |
165 | if candidates: |
|
165 | if candidates: | |
166 | # this will sort by whitespace, and by line number, |
|
166 | # this will sort by whitespace, and by line number, | |
167 | # less whitespace first |
|
167 | # less whitespace first | |
168 | candidates.sort() |
|
168 | candidates.sort() | |
169 | return lines, candidates[0][1] |
|
169 | return lines, candidates[0][1] | |
170 | else: |
|
170 | else: | |
171 | raise IOError('could not find class definition') |
|
171 | raise IOError('could not find class definition') | |
172 |
|
172 | |||
173 | if ismethod(object): |
|
173 | if ismethod(object): | |
174 | object = object.im_func |
|
174 | object = object.im_func | |
175 | if isfunction(object): |
|
175 | if isfunction(object): | |
176 | object = object.func_code |
|
176 | object = object.func_code | |
177 | if istraceback(object): |
|
177 | if istraceback(object): | |
178 | object = object.tb_frame |
|
178 | object = object.tb_frame | |
179 | if isframe(object): |
|
179 | if isframe(object): | |
180 | object = object.f_code |
|
180 | object = object.f_code | |
181 | if iscode(object): |
|
181 | if iscode(object): | |
182 | if not hasattr(object, 'co_firstlineno'): |
|
182 | if not hasattr(object, 'co_firstlineno'): | |
183 | raise IOError('could not find function definition') |
|
183 | raise IOError('could not find function definition') | |
184 | pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') |
|
184 | pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') | |
185 | pmatch = pat.match |
|
185 | pmatch = pat.match | |
186 | # fperez - fix: sometimes, co_firstlineno can give a number larger than |
|
186 | # fperez - fix: sometimes, co_firstlineno can give a number larger than | |
187 | # the length of lines, which causes an error. Safeguard against that. |
|
187 | # the length of lines, which causes an error. Safeguard against that. | |
188 | lnum = min(object.co_firstlineno,len(lines))-1 |
|
188 | lnum = min(object.co_firstlineno,len(lines))-1 | |
189 | while lnum > 0: |
|
189 | while lnum > 0: | |
190 | if pmatch(lines[lnum]): break |
|
190 | if pmatch(lines[lnum]): break | |
191 | lnum -= 1 |
|
191 | lnum -= 1 | |
192 |
|
192 | |||
193 | return lines, lnum |
|
193 | return lines, lnum | |
194 | raise IOError('could not find code object') |
|
194 | raise IOError('could not find code object') | |
195 |
|
195 | |||
196 | # Monkeypatch inspect to apply our bugfix. This code only works with py25 |
|
196 | # Monkeypatch inspect to apply our bugfix. This code only works with py25 | |
197 | if sys.version_info[:2] >= (2,5): |
|
197 | if sys.version_info[:2] >= (2,5): | |
198 | inspect.findsource = findsource |
|
198 | inspect.findsource = findsource | |
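A small, hedged sketch of what the patched call returns (the sample function is hypothetical and only for illustration):

    # On Python 2.5+ the monkeypatch above makes inspect.findsource the fixed
    # version: it returns the full list of source lines of the defining file
    # plus a zero-based line index.
    import inspect

    def sample():                      # hypothetical helper
        return 42

    lines, lnum = inspect.findsource(sample)
    print lines[lnum]                  # the 'def sample():' line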
199 |
|
199 | |||
200 | def fix_frame_records_filenames(records): |
|
200 | def fix_frame_records_filenames(records): | |
201 | """Try to fix the filenames in each record from inspect.getinnerframes(). |
|
201 | """Try to fix the filenames in each record from inspect.getinnerframes(). | |
202 |
|
202 | |||
203 | Particularly, modules loaded from within zip files have useless filenames |
|
203 | Particularly, modules loaded from within zip files have useless filenames | |
204 | attached to their code object, and inspect.getinnerframes() just uses them. |
|
204 | attached to their code object, and inspect.getinnerframes() just uses them. | |
205 | """ |
|
205 | """ | |
206 | fixed_records = [] |
|
206 | fixed_records = [] | |
207 | for frame, filename, line_no, func_name, lines, index in records: |
|
207 | for frame, filename, line_no, func_name, lines, index in records: | |
208 | # Look inside the frame's globals dictionary for __file__, which should |
|
208 | # Look inside the frame's globals dictionary for __file__, which should | |
209 | # be better. |
|
209 | # be better. | |
210 | better_fn = frame.f_globals.get('__file__', None) |
|
210 | better_fn = frame.f_globals.get('__file__', None) | |
211 | if isinstance(better_fn, str): |
|
211 | if isinstance(better_fn, str): | |
212 | # Check the type just in case someone did something weird with |
|
212 | # Check the type just in case someone did something weird with | |
213 | # __file__. It might also be None if the error occurred during |
|
213 | # __file__. It might also be None if the error occurred during | |
214 | # import. |
|
214 | # import. | |
215 | filename = better_fn |
|
215 | filename = better_fn | |
216 | fixed_records.append((frame, filename, line_no, func_name, lines, index)) |
|
216 | fixed_records.append((frame, filename, line_no, func_name, lines, index)) | |
217 | return fixed_records |
|
217 | return fixed_records | |
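A hedged usage sketch (the failing expression is arbitrary): the records fixed here are the 6-tuples produced by inspect.getinnerframes, and only the filename slot is replaced when the frame's __file__ global looks more trustworthy than the code object's co_filename.

    import inspect, sys
    try:
        1/0
    except ZeroDivisionError:
        raw = inspect.getinnerframes(sys.exc_info()[2])
        for frame, filename, line_no, func_name, lines, index in \
                fix_frame_records_filenames(raw):
            print filename, line_no, func_name   # filename now prefers __file__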
218 |
|
218 | |||
219 |
|
219 | |||
220 | def _fixed_getinnerframes(etb, context=1,tb_offset=0): |
|
220 | def _fixed_getinnerframes(etb, context=1,tb_offset=0): | |
221 | import linecache |
|
221 | import linecache | |
222 | LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 |
|
222 | LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 | |
223 |
|
223 | |||
224 | records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) |
|
224 | records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) | |
225 |
|
225 | |||
226 | # If the error is at the console, don't build any context, since it would |
|
226 | # If the error is at the console, don't build any context, since it would | |
227 | # otherwise produce 5 blank lines printed out (there is no file at the |
|
227 | # otherwise produce 5 blank lines printed out (there is no file at the | |
228 | # console) |
|
228 | # console) | |
229 | rec_check = records[tb_offset:] |
|
229 | rec_check = records[tb_offset:] | |
230 | try: |
|
230 | try: | |
231 | rname = rec_check[0][1] |
|
231 | rname = rec_check[0][1] | |
232 | if rname == '<ipython console>' or rname.endswith('<string>'): |
|
232 | if rname == '<ipython console>' or rname.endswith('<string>'): | |
233 | return rec_check |
|
233 | return rec_check | |
234 | except IndexError: |
|
234 | except IndexError: | |
235 | pass |
|
235 | pass | |
236 |
|
236 | |||
237 | aux = traceback.extract_tb(etb) |
|
237 | aux = traceback.extract_tb(etb) | |
238 | assert len(records) == len(aux) |
|
238 | assert len(records) == len(aux) | |
239 | for i, (file, lnum, _, _) in zip(range(len(records)), aux): |
|
239 | for i, (file, lnum, _, _) in zip(range(len(records)), aux): | |
240 | maybeStart = lnum-1 - context//2 |
|
240 | maybeStart = lnum-1 - context//2 | |
241 | start = max(maybeStart, 0) |
|
241 | start = max(maybeStart, 0) | |
242 | end = start + context |
|
242 | end = start + context | |
243 | lines = linecache.getlines(file)[start:end] |
|
243 | lines = linecache.getlines(file)[start:end] | |
244 | # pad with empty lines if necessary |
|
244 | # pad with empty lines if necessary | |
245 | if maybeStart < 0: |
|
245 | if maybeStart < 0: | |
246 | lines = (['\n'] * -maybeStart) + lines |
|
246 | lines = (['\n'] * -maybeStart) + lines | |
247 | if len(lines) < context: |
|
247 | if len(lines) < context: | |
248 | lines += ['\n'] * (context - len(lines)) |
|
248 | lines += ['\n'] * (context - len(lines)) | |
249 | buf = list(records[i]) |
|
249 | buf = list(records[i]) | |
250 | buf[LNUM_POS] = lnum |
|
250 | buf[LNUM_POS] = lnum | |
251 | buf[INDEX_POS] = lnum - 1 - start |
|
251 | buf[INDEX_POS] = lnum - 1 - start | |
252 | buf[LINES_POS] = lines |
|
252 | buf[LINES_POS] = lines | |
253 | records[i] = tuple(buf) |
|
253 | records[i] = tuple(buf) | |
254 | return records[tb_offset:] |
|
254 | return records[tb_offset:] | |
255 |
|
255 | |||
256 | # Helper function -- largely belongs to VerboseTB, but we need the same |
|
256 | # Helper function -- largely belongs to VerboseTB, but we need the same | |
257 | # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they |
|
257 | # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they | |
258 | # can be recognized properly by ipython.el's py-traceback-line-re |
|
258 | # can be recognized properly by ipython.el's py-traceback-line-re | |
259 | # (SyntaxErrors have to be treated specially because they have no traceback) |
|
259 | # (SyntaxErrors have to be treated specially because they have no traceback) | |
260 |
|
260 | |||
261 | _parser = PyColorize.Parser() |
|
261 | _parser = PyColorize.Parser() | |
262 |
|
262 | |||
263 | def _formatTracebackLines(lnum, index, lines, Colors, lvals=None,scheme=None): |
|
263 | def _formatTracebackLines(lnum, index, lines, Colors, lvals=None,scheme=None): | |
264 | numbers_width = INDENT_SIZE - 1 |
|
264 | numbers_width = INDENT_SIZE - 1 | |
265 | res = [] |
|
265 | res = [] | |
266 | i = lnum - index |
|
266 | i = lnum - index | |
267 |
|
267 | |||
268 | # This lets us get fully syntax-highlighted tracebacks. |
|
268 | # This lets us get fully syntax-highlighted tracebacks. | |
269 | if scheme is None: |
|
269 | if scheme is None: | |
270 | try: |
|
270 | try: | |
|
271 | # Again, reference to a global __IPYTHON__ that doesn't exist. | |||
|
272 | # XXX | |||
271 | scheme = __IPYTHON__.rc.colors |
|
273 | scheme = __IPYTHON__.rc.colors | |
272 | except: |
|
274 | except: | |
273 | scheme = DEFAULT_SCHEME |
|
275 | scheme = DEFAULT_SCHEME | |
274 | _line_format = _parser.format2 |
|
276 | _line_format = _parser.format2 | |
275 |
|
277 | |||
276 | for line in lines: |
|
278 | for line in lines: | |
277 | new_line, err = _line_format(line,'str',scheme) |
|
279 | new_line, err = _line_format(line,'str',scheme) | |
278 | if not err: line = new_line |
|
280 | if not err: line = new_line | |
279 |
|
281 | |||
280 | if i == lnum: |
|
282 | if i == lnum: | |
281 | # This is the line with the error |
|
283 | # This is the line with the error | |
282 | pad = numbers_width - len(str(i)) |
|
284 | pad = numbers_width - len(str(i)) | |
283 | if pad >= 3: |
|
285 | if pad >= 3: | |
284 | marker = '-'*(pad-3) + '-> ' |
|
286 | marker = '-'*(pad-3) + '-> ' | |
285 | elif pad == 2: |
|
287 | elif pad == 2: | |
286 | marker = '> ' |
|
288 | marker = '> ' | |
287 | elif pad == 1: |
|
289 | elif pad == 1: | |
288 | marker = '>' |
|
290 | marker = '>' | |
289 | else: |
|
291 | else: | |
290 | marker = '' |
|
292 | marker = '' | |
291 | num = marker + str(i) |
|
293 | num = marker + str(i) | |
292 | line = '%s%s%s %s%s' %(Colors.linenoEm, num, |
|
294 | line = '%s%s%s %s%s' %(Colors.linenoEm, num, | |
293 | Colors.line, line, Colors.Normal) |
|
295 | Colors.line, line, Colors.Normal) | |
294 | else: |
|
296 | else: | |
295 | num = '%*s' % (numbers_width,i) |
|
297 | num = '%*s' % (numbers_width,i) | |
296 | line = '%s%s%s %s' %(Colors.lineno, num, |
|
298 | line = '%s%s%s %s' %(Colors.lineno, num, | |
297 | Colors.Normal, line) |
|
299 | Colors.Normal, line) | |
298 |
|
300 | |||
299 | res.append(line) |
|
301 | res.append(line) | |
300 | if lvals and i == lnum: |
|
302 | if lvals and i == lnum: | |
301 | res.append(lvals + '\n') |
|
303 | res.append(lvals + '\n') | |
302 | i = i + 1 |
|
304 | i = i + 1 | |
303 | return res |
|
305 | return res | |
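A worked example of the marker layout above, using the module defaults (the concrete line number 42 is assumed for illustration):

    INDENT_SIZE = 8                      # module default from the globals above
    numbers_width = INDENT_SIZE - 1      # 7
    pad = numbers_width - len(str(42))   # 5
    marker = '-' * (pad - 3) + '-> '     # '---> '
    print marker + str(42)               # prints: ---> 42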
304 |
|
306 | |||
305 |
|
307 | |||
306 | #--------------------------------------------------------------------------- |
|
308 | #--------------------------------------------------------------------------- | |
307 | # Module classes |
|
309 | # Module classes | |
308 | class TBTools: |
|
310 | class TBTools: | |
309 | """Basic tools used by all traceback printer classes.""" |
|
311 | """Basic tools used by all traceback printer classes.""" | |
310 |
|
312 | |||
311 | def __init__(self,color_scheme = 'NoColor',call_pdb=False): |
|
313 | def __init__(self,color_scheme = 'NoColor',call_pdb=False): | |
312 | # Whether to call the interactive pdb debugger after printing |
|
314 | # Whether to call the interactive pdb debugger after printing | |
313 | # tracebacks or not |
|
315 | # tracebacks or not | |
314 | self.call_pdb = call_pdb |
|
316 | self.call_pdb = call_pdb | |
315 |
|
317 | |||
316 | # Create color table |
|
318 | # Create color table | |
317 | self.color_scheme_table = exception_colors() |
|
319 | self.color_scheme_table = exception_colors() | |
318 |
|
320 | |||
319 | self.set_colors(color_scheme) |
|
321 | self.set_colors(color_scheme) | |
320 | self.old_scheme = color_scheme # save initial value for toggles |
|
322 | self.old_scheme = color_scheme # save initial value for toggles | |
321 |
|
323 | |||
322 | if call_pdb: |
|
324 | if call_pdb: | |
323 | self.pdb = Debugger.Pdb(self.color_scheme_table.active_scheme_name) |
|
325 | self.pdb = Debugger.Pdb(self.color_scheme_table.active_scheme_name) | |
324 | else: |
|
326 | else: | |
325 | self.pdb = None |
|
327 | self.pdb = None | |
326 |
|
328 | |||
327 | def set_colors(self,*args,**kw): |
|
329 | def set_colors(self,*args,**kw): | |
328 | """Shorthand access to the color table scheme selector method.""" |
|
330 | """Shorthand access to the color table scheme selector method.""" | |
329 |
|
331 | |||
330 | # Set own color table |
|
332 | # Set own color table | |
331 | self.color_scheme_table.set_active_scheme(*args,**kw) |
|
333 | self.color_scheme_table.set_active_scheme(*args,**kw) | |
332 | # for convenience, set Colors to the active scheme |
|
334 | # for convenience, set Colors to the active scheme | |
333 | self.Colors = self.color_scheme_table.active_colors |
|
335 | self.Colors = self.color_scheme_table.active_colors | |
334 | # Also set colors of debugger |
|
336 | # Also set colors of debugger | |
335 | if hasattr(self,'pdb') and self.pdb is not None: |
|
337 | if hasattr(self,'pdb') and self.pdb is not None: | |
336 | self.pdb.set_colors(*args,**kw) |
|
338 | self.pdb.set_colors(*args,**kw) | |
337 |
|
339 | |||
338 | def color_toggle(self): |
|
340 | def color_toggle(self): | |
339 | """Toggle between the currently active color scheme and NoColor.""" |
|
341 | """Toggle between the currently active color scheme and NoColor.""" | |
340 |
|
342 | |||
341 | if self.color_scheme_table.active_scheme_name == 'NoColor': |
|
343 | if self.color_scheme_table.active_scheme_name == 'NoColor': | |
342 | self.color_scheme_table.set_active_scheme(self.old_scheme) |
|
344 | self.color_scheme_table.set_active_scheme(self.old_scheme) | |
343 | self.Colors = self.color_scheme_table.active_colors |
|
345 | self.Colors = self.color_scheme_table.active_colors | |
344 | else: |
|
346 | else: | |
345 | self.old_scheme = self.color_scheme_table.active_scheme_name |
|
347 | self.old_scheme = self.color_scheme_table.active_scheme_name | |
346 | self.color_scheme_table.set_active_scheme('NoColor') |
|
348 | self.color_scheme_table.set_active_scheme('NoColor') | |
347 | self.Colors = self.color_scheme_table.active_colors |
|
349 | self.Colors = self.color_scheme_table.active_colors | |
348 |
|
350 | |||
349 | #--------------------------------------------------------------------------- |
|
351 | #--------------------------------------------------------------------------- | |
350 | class ListTB(TBTools): |
|
352 | class ListTB(TBTools): | |
351 | """Print traceback information from a traceback list, with optional color. |
|
353 | """Print traceback information from a traceback list, with optional color. | |
352 |
|
354 | |||
353 | Calling: requires 3 arguments: |
|
355 | Calling: requires 3 arguments: | |
354 | (etype, evalue, elist) |
|
356 | (etype, evalue, elist) | |
355 | as would be obtained by: |
|
357 | as would be obtained by: | |
356 | etype, evalue, tb = sys.exc_info() |
|
358 | etype, evalue, tb = sys.exc_info() | |
357 | if tb: |
|
359 | if tb: | |
358 | elist = traceback.extract_tb(tb) |
|
360 | elist = traceback.extract_tb(tb) | |
359 | else: |
|
361 | else: | |
360 | elist = None |
|
362 | elist = None | |
361 |
|
363 | |||
362 | It can thus be used by programs which need to process the traceback before |
|
364 | It can thus be used by programs which need to process the traceback before | |
363 | printing (such as console replacements based on the code module from the |
|
365 | printing (such as console replacements based on the code module from the | |
364 | standard library). |
|
366 | standard library). | |
365 |
|
367 | |||
366 | Because they are meant to be called without a full traceback (only a |
|
368 | Because they are meant to be called without a full traceback (only a | |
367 | list), instances of this class can't call the interactive pdb debugger.""" |
|
369 | list), instances of this class can't call the interactive pdb debugger.""" | |
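A minimal usage sketch that follows the calling recipe in the docstring above (hedged; the failing expression is arbitrary):

    import sys, traceback
    ltb = ListTB(color_scheme='NoColor')
    try:
        1/0
    except ZeroDivisionError:
        etype, evalue, tb = sys.exc_info()
        if tb:
            elist = traceback.extract_tb(tb)
        else:
            elist = None
        ltb(etype, evalue, elist)    # writes the formatted traceback to Term.cerr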
368 |
|
370 | |||
369 | def __init__(self,color_scheme = 'NoColor'): |
|
371 | def __init__(self,color_scheme = 'NoColor'): | |
370 | TBTools.__init__(self,color_scheme = color_scheme,call_pdb=0) |
|
372 | TBTools.__init__(self,color_scheme = color_scheme,call_pdb=0) | |
371 |
|
373 | |||
372 | def __call__(self, etype, value, elist): |
|
374 | def __call__(self, etype, value, elist): | |
373 | Term.cout.flush() |
|
375 | Term.cout.flush() | |
374 | print >> Term.cerr, self.text(etype,value,elist) |
|
376 | print >> Term.cerr, self.text(etype,value,elist) | |
375 | Term.cerr.flush() |
|
377 | Term.cerr.flush() | |
376 |
|
378 | |||
377 | def text(self,etype, value, elist,context=5): |
|
379 | def text(self,etype, value, elist,context=5): | |
378 | """Return a color formatted string with the traceback info.""" |
|
380 | """Return a color formatted string with the traceback info.""" | |
379 |
|
381 | |||
380 | Colors = self.Colors |
|
382 | Colors = self.Colors | |
381 | out_string = ['%s%s%s\n' % (Colors.topline,'-'*60,Colors.Normal)] |
|
383 | out_string = ['%s%s%s\n' % (Colors.topline,'-'*60,Colors.Normal)] | |
382 | if elist: |
|
384 | if elist: | |
383 | out_string.append('Traceback %s(most recent call last)%s:' % \ |
|
385 | out_string.append('Traceback %s(most recent call last)%s:' % \ | |
384 | (Colors.normalEm, Colors.Normal) + '\n') |
|
386 | (Colors.normalEm, Colors.Normal) + '\n') | |
385 | out_string.extend(self._format_list(elist)) |
|
387 | out_string.extend(self._format_list(elist)) | |
386 | lines = self._format_exception_only(etype, value) |
|
388 | lines = self._format_exception_only(etype, value) | |
387 | for line in lines[:-1]: |
|
389 | for line in lines[:-1]: | |
388 | out_string.append(" "+line) |
|
390 | out_string.append(" "+line) | |
389 | out_string.append(lines[-1]) |
|
391 | out_string.append(lines[-1]) | |
390 | return ''.join(out_string) |
|
392 | return ''.join(out_string) | |
391 |
|
393 | |||
392 | def _format_list(self, extracted_list): |
|
394 | def _format_list(self, extracted_list): | |
393 | """Format a list of traceback entry tuples for printing. |
|
395 | """Format a list of traceback entry tuples for printing. | |
394 |
|
396 | |||
395 | Given a list of tuples as returned by extract_tb() or |
|
397 | Given a list of tuples as returned by extract_tb() or | |
396 | extract_stack(), return a list of strings ready for printing. |
|
398 | extract_stack(), return a list of strings ready for printing. | |
397 | Each string in the resulting list corresponds to the item with the |
|
399 | Each string in the resulting list corresponds to the item with the | |
398 | same index in the argument list. Each string ends in a newline; |
|
400 | same index in the argument list. Each string ends in a newline; | |
399 | the strings may contain internal newlines as well, for those items |
|
401 | the strings may contain internal newlines as well, for those items | |
400 | whose source text line is not None. |
|
402 | whose source text line is not None. | |
401 |
|
403 | |||
402 | Lifted almost verbatim from traceback.py |
|
404 | Lifted almost verbatim from traceback.py | |
403 | """ |
|
405 | """ | |
404 |
|
406 | |||
405 | Colors = self.Colors |
|
407 | Colors = self.Colors | |
406 | list = [] |
|
408 | list = [] | |
407 | for filename, lineno, name, line in extracted_list[:-1]: |
|
409 | for filename, lineno, name, line in extracted_list[:-1]: | |
408 | item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ |
|
410 | item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ | |
409 | (Colors.filename, filename, Colors.Normal, |
|
411 | (Colors.filename, filename, Colors.Normal, | |
410 | Colors.lineno, lineno, Colors.Normal, |
|
412 | Colors.lineno, lineno, Colors.Normal, | |
411 | Colors.name, name, Colors.Normal) |
|
413 | Colors.name, name, Colors.Normal) | |
412 | if line: |
|
414 | if line: | |
413 | item = item + ' %s\n' % line.strip() |
|
415 | item = item + ' %s\n' % line.strip() | |
414 | list.append(item) |
|
416 | list.append(item) | |
415 | # Emphasize the last entry |
|
417 | # Emphasize the last entry | |
416 | filename, lineno, name, line = extracted_list[-1] |
|
418 | filename, lineno, name, line = extracted_list[-1] | |
417 | item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ |
|
419 | item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ | |
418 | (Colors.normalEm, |
|
420 | (Colors.normalEm, | |
419 | Colors.filenameEm, filename, Colors.normalEm, |
|
421 | Colors.filenameEm, filename, Colors.normalEm, | |
420 | Colors.linenoEm, lineno, Colors.normalEm, |
|
422 | Colors.linenoEm, lineno, Colors.normalEm, | |
421 | Colors.nameEm, name, Colors.normalEm, |
|
423 | Colors.nameEm, name, Colors.normalEm, | |
422 | Colors.Normal) |
|
424 | Colors.Normal) | |
423 | if line: |
|
425 | if line: | |
424 | item = item + '%s %s%s\n' % (Colors.line, line.strip(), |
|
426 | item = item + '%s %s%s\n' % (Colors.line, line.strip(), | |
425 | Colors.Normal) |
|
427 | Colors.Normal) | |
426 | list.append(item) |
|
428 | list.append(item) | |
427 | return list |
|
429 | return list | |
428 |
|
430 | |||
429 | def _format_exception_only(self, etype, value): |
|
431 | def _format_exception_only(self, etype, value): | |
430 | """Format the exception part of a traceback. |
|
432 | """Format the exception part of a traceback. | |
431 |
|
433 | |||
432 | The arguments are the exception type and value such as given by |
|
434 | The arguments are the exception type and value such as given by | |
433 | sys.exc_info()[:2]. The return value is a list of strings, each ending |
|
435 | sys.exc_info()[:2]. The return value is a list of strings, each ending | |
434 | in a newline. Normally, the list contains a single string; however, |
|
436 | in a newline. Normally, the list contains a single string; however, | |
435 | for SyntaxError exceptions, it contains several lines that (when |
|
437 | for SyntaxError exceptions, it contains several lines that (when | |
436 | printed) display detailed information about where the syntax error |
|
438 | printed) display detailed information about where the syntax error | |
437 | occurred. The message indicating which exception occurred is |
|
439 | occurred. The message indicating which exception occurred is | |
438 | always the last string in the list. |
|
440 | always the last string in the list. | |
439 |
|
441 | |||
440 | Also lifted nearly verbatim from traceback.py |
|
442 | Also lifted nearly verbatim from traceback.py | |
441 | """ |
|
443 | """ | |
442 |
|
444 | |||
443 | have_filedata = False |
|
445 | have_filedata = False | |
444 | Colors = self.Colors |
|
446 | Colors = self.Colors | |
445 | list = [] |
|
447 | list = [] | |
446 | try: |
|
448 | try: | |
447 | stype = Colors.excName + etype.__name__ + Colors.Normal |
|
449 | stype = Colors.excName + etype.__name__ + Colors.Normal | |
448 | except AttributeError: |
|
450 | except AttributeError: | |
449 | stype = etype # String exceptions don't get special coloring |
|
451 | stype = etype # String exceptions don't get special coloring | |
450 | if value is None: |
|
452 | if value is None: | |
451 | list.append( str(stype) + '\n') |
|
453 | list.append( str(stype) + '\n') | |
452 | else: |
|
454 | else: | |
453 | if etype is SyntaxError: |
|
455 | if etype is SyntaxError: | |
454 | try: |
|
456 | try: | |
455 | msg, (filename, lineno, offset, line) = value |
|
457 | msg, (filename, lineno, offset, line) = value | |
456 | except: |
|
458 | except: | |
457 | have_filedata = False |
|
459 | have_filedata = False | |
458 | else: |
|
460 | else: | |
459 | have_filedata = True |
|
461 | have_filedata = True | |
460 | #print 'filename is',filename # dbg |
|
462 | #print 'filename is',filename # dbg | |
461 | if not filename: filename = "<string>" |
|
463 | if not filename: filename = "<string>" | |
462 | list.append('%s File %s"%s"%s, line %s%d%s\n' % \ |
|
464 | list.append('%s File %s"%s"%s, line %s%d%s\n' % \ | |
463 | (Colors.normalEm, |
|
465 | (Colors.normalEm, | |
464 | Colors.filenameEm, filename, Colors.normalEm, |
|
466 | Colors.filenameEm, filename, Colors.normalEm, | |
465 | Colors.linenoEm, lineno, Colors.Normal )) |
|
467 | Colors.linenoEm, lineno, Colors.Normal )) | |
466 | if line is not None: |
|
468 | if line is not None: | |
467 | i = 0 |
|
469 | i = 0 | |
468 | while i < len(line) and line[i].isspace(): |
|
470 | while i < len(line) and line[i].isspace(): | |
469 | i = i+1 |
|
471 | i = i+1 | |
470 | list.append('%s %s%s\n' % (Colors.line, |
|
472 | list.append('%s %s%s\n' % (Colors.line, | |
471 | line.strip(), |
|
473 | line.strip(), | |
472 | Colors.Normal)) |
|
474 | Colors.Normal)) | |
473 | if offset is not None: |
|
475 | if offset is not None: | |
474 | s = ' ' |
|
476 | s = ' ' | |
475 | for c in line[i:offset-1]: |
|
477 | for c in line[i:offset-1]: | |
476 | if c.isspace(): |
|
478 | if c.isspace(): | |
477 | s = s + c |
|
479 | s = s + c | |
478 | else: |
|
480 | else: | |
479 | s = s + ' ' |
|
481 | s = s + ' ' | |
480 | list.append('%s%s^%s\n' % (Colors.caret, s, |
|
482 | list.append('%s%s^%s\n' % (Colors.caret, s, | |
481 | Colors.Normal) ) |
|
483 | Colors.Normal) ) | |
482 | value = msg |
|
484 | value = msg | |
483 | s = self._some_str(value) |
|
485 | s = self._some_str(value) | |
484 | if s: |
|
486 | if s: | |
485 | list.append('%s%s:%s %s\n' % (str(stype), Colors.excName, |
|
487 | list.append('%s%s:%s %s\n' % (str(stype), Colors.excName, | |
486 | Colors.Normal, s)) |
|
488 | Colors.Normal, s)) | |
487 | else: |
|
489 | else: | |
488 | list.append('%s\n' % str(stype)) |
|
490 | list.append('%s\n' % str(stype)) | |
489 |
|
491 | |||
490 | # This is being commented out for now as the __IPYTHON__ variable |
|
492 | # This is being commented out for now as the __IPYTHON__ variable | |
491 | # referenced here is not resolved and causes massive test failures |
|
493 | # referenced here is not resolved and causes massive test failures | |
492 | # and errors. B. Granger, 04/2009. |
|
494 | # and errors. B. Granger, 04/2009. XXX | |
493 | # See https://bugs.launchpad.net/bugs/362137 |
|
495 | # See https://bugs.launchpad.net/bugs/362137 | |
494 | # # vds:>> |
|
496 | # # vds:>> | |
495 | # if have_filedata: |
|
497 | # if have_filedata: | |
496 | # __IPYTHON__.hooks.synchronize_with_editor(filename, lineno, 0) |
|
498 | # __IPYTHON__.hooks.synchronize_with_editor(filename, lineno, 0) | |
497 | # # vds:<< |
|
499 | # # vds:<< | |
498 |
|
500 | |||
499 | return list |
|
501 | return list | |
500 |
|
502 | |||
501 | def _some_str(self, value): |
|
503 | def _some_str(self, value): | |
502 | # Lifted from traceback.py |
|
504 | # Lifted from traceback.py | |
503 | try: |
|
505 | try: | |
504 | return str(value) |
|
506 | return str(value) | |
505 | except: |
|
507 | except: | |
506 | return '<unprintable %s object>' % type(value).__name__ |
|
508 | return '<unprintable %s object>' % type(value).__name__ | |
507 |
|
509 | |||
508 | #---------------------------------------------------------------------------- |
|
510 | #---------------------------------------------------------------------------- | |
509 | class VerboseTB(TBTools): |
|
511 | class VerboseTB(TBTools): | |
510 | """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead |
|
512 | """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead | |
511 | of HTML. Requires inspect and pydoc. Crazy, man. |
|
513 | of HTML. Requires inspect and pydoc. Crazy, man. | |
512 |
|
514 | |||
513 | Modified version which optionally strips the topmost entries from the |
|
515 | Modified version which optionally strips the topmost entries from the | |
514 | traceback, to be used with alternate interpreters (because their own code |
|
516 | traceback, to be used with alternate interpreters (because their own code | |
515 | would appear in the traceback).""" |
|
517 | would appear in the traceback).""" | |
516 |
|
518 | |||
517 | def __init__(self,color_scheme = 'Linux',tb_offset=0,long_header=0, |
|
519 | def __init__(self,color_scheme = 'Linux',tb_offset=0,long_header=0, | |
518 | call_pdb = 0, include_vars=1): |
|
520 | call_pdb = 0, include_vars=1): | |
519 | """Specify traceback offset, headers and color scheme. |
|
521 | """Specify traceback offset, headers and color scheme. | |
520 |
|
522 | |||
521 | Define how many frames to drop from the tracebacks. Calling it with |
|
523 | Define how many frames to drop from the tracebacks. Calling it with | |
522 | tb_offset=1 allows use of this handler in interpreters which will have |
|
524 | tb_offset=1 allows use of this handler in interpreters which will have | |
523 | their own code at the top of the traceback (VerboseTB will first |
|
525 | their own code at the top of the traceback (VerboseTB will first | |
524 | remove that frame before printing the traceback info).""" |
|
526 | remove that frame before printing the traceback info).""" | |
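A hedged sketch of the tb_offset idea described above: a host interpreter that runs user code can drop its own topmost frame from the report (run_user_code is hypothetical):

    import sys
    vtb = VerboseTB(color_scheme='NoColor', tb_offset=1, include_vars=0)

    def run_user_code(src):              # hypothetical embedding interpreter
        try:
            exec src in {}               # this host frame sits at the top of the tb
        except:
            etype, evalue, etb = sys.exc_info()
            print vtb.text(etype, evalue, etb)   # host frame is skipped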
525 | TBTools.__init__(self,color_scheme=color_scheme,call_pdb=call_pdb) |
|
527 | TBTools.__init__(self,color_scheme=color_scheme,call_pdb=call_pdb) | |
526 | self.tb_offset = tb_offset |
|
528 | self.tb_offset = tb_offset | |
527 | self.long_header = long_header |
|
529 | self.long_header = long_header | |
528 | self.include_vars = include_vars |
|
530 | self.include_vars = include_vars | |
529 |
|
531 | |||
530 | def text(self, etype, evalue, etb, context=5): |
|
532 | def text(self, etype, evalue, etb, context=5): | |
531 | """Return a nice text document describing the traceback.""" |
|
533 | """Return a nice text document describing the traceback.""" | |
532 |
|
534 | |||
533 | # some locals |
|
535 | # some locals | |
534 | try: |
|
536 | try: | |
535 | etype = etype.__name__ |
|
537 | etype = etype.__name__ | |
536 | except AttributeError: |
|
538 | except AttributeError: | |
537 | pass |
|
539 | pass | |
538 | Colors = self.Colors # just a shorthand + quicker name lookup |
|
540 | Colors = self.Colors # just a shorthand + quicker name lookup | |
539 | ColorsNormal = Colors.Normal # used a lot |
|
541 | ColorsNormal = Colors.Normal # used a lot | |
540 | col_scheme = self.color_scheme_table.active_scheme_name |
|
542 | col_scheme = self.color_scheme_table.active_scheme_name | |
541 | indent = ' '*INDENT_SIZE |
|
543 | indent = ' '*INDENT_SIZE | |
542 | em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal) |
|
544 | em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal) | |
543 | undefined = '%sundefined%s' % (Colors.em, ColorsNormal) |
|
545 | undefined = '%sundefined%s' % (Colors.em, ColorsNormal) | |
544 | exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal) |
|
546 | exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal) | |
545 |
|
547 | |||
546 | # some internal-use functions |
|
548 | # some internal-use functions | |
547 | def text_repr(value): |
|
549 | def text_repr(value): | |
548 | """Hopefully pretty robust repr equivalent.""" |
|
550 | """Hopefully pretty robust repr equivalent.""" | |
549 | # this is pretty horrible but should always return *something* |
|
551 | # this is pretty horrible but should always return *something* | |
550 | try: |
|
552 | try: | |
551 | return pydoc.text.repr(value) |
|
553 | return pydoc.text.repr(value) | |
552 | except KeyboardInterrupt: |
|
554 | except KeyboardInterrupt: | |
553 | raise |
|
555 | raise | |
554 | except: |
|
556 | except: | |
555 | try: |
|
557 | try: | |
556 | return repr(value) |
|
558 | return repr(value) | |
557 | except KeyboardInterrupt: |
|
559 | except KeyboardInterrupt: | |
558 | raise |
|
560 | raise | |
559 | except: |
|
561 | except: | |
560 | try: |
|
562 | try: | |
561 | # all still in an except block so we catch |
|
563 | # all still in an except block so we catch | |
562 | # getattr raising |
|
564 | # getattr raising | |
563 | name = getattr(value, '__name__', None) |
|
565 | name = getattr(value, '__name__', None) | |
564 | if name: |
|
566 | if name: | |
565 | # ick, recursion |
|
567 | # ick, recursion | |
566 | return text_repr(name) |
|
568 | return text_repr(name) | |
567 | klass = getattr(value, '__class__', None) |
|
569 | klass = getattr(value, '__class__', None) | |
568 | if klass: |
|
570 | if klass: | |
569 | return '%s instance' % text_repr(klass) |
|
571 | return '%s instance' % text_repr(klass) | |
570 | except KeyboardInterrupt: |
|
572 | except KeyboardInterrupt: | |
571 | raise |
|
573 | raise | |
572 | except: |
|
574 | except: | |
573 | return 'UNRECOVERABLE REPR FAILURE' |
|
575 | return 'UNRECOVERABLE REPR FAILURE' | |
574 | def eqrepr(value, repr=text_repr): return '=%s' % repr(value) |
|
576 | def eqrepr(value, repr=text_repr): return '=%s' % repr(value) | |
575 | def nullrepr(value, repr=text_repr): return '' |
|
577 | def nullrepr(value, repr=text_repr): return '' | |
576 |
|
578 | |||
577 | # meat of the code begins |
|
579 | # meat of the code begins | |
578 | try: |
|
580 | try: | |
579 | etype = etype.__name__ |
|
581 | etype = etype.__name__ | |
580 | except AttributeError: |
|
582 | except AttributeError: | |
581 | pass |
|
583 | pass | |
582 |
|
584 | |||
583 | if self.long_header: |
|
585 | if self.long_header: | |
584 | # Header with the exception type, python version, and date |
|
586 | # Header with the exception type, python version, and date | |
585 | pyver = 'Python ' + string.split(sys.version)[0] + ': ' + sys.executable |
|
587 | pyver = 'Python ' + string.split(sys.version)[0] + ': ' + sys.executable | |
586 | date = time.ctime(time.time()) |
|
588 | date = time.ctime(time.time()) | |
587 |
|
589 | |||
588 | head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal, |
|
590 | head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal, | |
589 | exc, ' '*(75-len(str(etype))-len(pyver)), |
|
591 | exc, ' '*(75-len(str(etype))-len(pyver)), | |
590 | pyver, string.rjust(date, 75) ) |
|
592 | pyver, string.rjust(date, 75) ) | |
591 | head += "\nA problem occurred executing Python code. Here is the sequence of function"\ |
|
593 | head += "\nA problem occurred executing Python code. Here is the sequence of function"\ | |
592 | "\ncalls leading up to the error, with the most recent (innermost) call last." |
|
594 | "\ncalls leading up to the error, with the most recent (innermost) call last." | |
593 | else: |
|
595 | else: | |
594 | # Simplified header |
|
596 | # Simplified header | |
595 | head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc, |
|
597 | head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc, | |
596 | string.rjust('Traceback (most recent call last)', |
|
598 | string.rjust('Traceback (most recent call last)', | |
597 | 75 - len(str(etype)) ) ) |
|
599 | 75 - len(str(etype)) ) ) | |
598 | frames = [] |
|
600 | frames = [] | |
599 | # Flush cache before calling inspect. This helps alleviate some of the |
|
601 | # Flush cache before calling inspect. This helps alleviate some of the | |
600 | # problems with python 2.3's inspect.py. |
|
602 | # problems with python 2.3's inspect.py. | |
601 | linecache.checkcache() |
|
603 | linecache.checkcache() | |
602 | # Drop topmost frames if requested |
|
604 | # Drop topmost frames if requested | |
603 | try: |
|
605 | try: | |
604 | # Try the default getinnerframes and Alex's: Alex's fixes some |
|
606 | # Try the default getinnerframes and Alex's: Alex's fixes some | |
605 | # problems, but it generates empty tracebacks for console errors |
|
607 | # problems, but it generates empty tracebacks for console errors | |
606 | # (5 blank lines) where none should be returned. |
|
608 | # (5 blank lines) where none should be returned. | |
607 | #records = inspect.getinnerframes(etb, context)[self.tb_offset:] |
|
609 | #records = inspect.getinnerframes(etb, context)[self.tb_offset:] | |
608 | #print 'python records:', records # dbg |
|
610 | #print 'python records:', records # dbg | |
609 | records = _fixed_getinnerframes(etb, context,self.tb_offset) |
|
611 | records = _fixed_getinnerframes(etb, context,self.tb_offset) | |
610 | #print 'alex records:', records # dbg |
|
612 | #print 'alex records:', records # dbg | |
611 | except: |
|
613 | except: | |
612 |
|
614 | |||
613 | # FIXME: I've been getting many crash reports from python 2.3 |
|
615 | # FIXME: I've been getting many crash reports from python 2.3 | |
614 | # users, traceable to inspect.py. If I can find a small test-case |
|
616 | # users, traceable to inspect.py. If I can find a small test-case | |
615 | # to reproduce this, I should either write a better workaround or |
|
617 | # to reproduce this, I should either write a better workaround or | |
616 | # file a bug report against inspect (if that's the real problem). |
|
618 | # file a bug report against inspect (if that's the real problem). | |
617 | # So far, I haven't been able to find an isolated example to |
|
619 | # So far, I haven't been able to find an isolated example to | |
618 | # reproduce the problem. |
|
620 | # reproduce the problem. | |
619 | inspect_error() |
|
621 | inspect_error() | |
620 | traceback.print_exc(file=Term.cerr) |
|
622 | traceback.print_exc(file=Term.cerr) | |
621 | info('\nUnfortunately, your original traceback can not be constructed.\n') |
|
623 | info('\nUnfortunately, your original traceback can not be constructed.\n') | |
622 | return '' |
|
624 | return '' | |
623 |
|
625 | |||
624 | # build some color string templates outside these nested loops |
|
626 | # build some color string templates outside these nested loops | |
625 | tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal) |
|
627 | tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal) | |
626 | tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, |
|
628 | tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, | |
627 | ColorsNormal) |
|
629 | ColorsNormal) | |
628 | tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ |
|
630 | tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ | |
629 | (Colors.vName, Colors.valEm, ColorsNormal) |
|
631 | (Colors.vName, Colors.valEm, ColorsNormal) | |
630 | tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) |
|
632 | tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) | |
631 | tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, |
|
633 | tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, | |
632 | Colors.vName, ColorsNormal) |
|
634 | Colors.vName, ColorsNormal) | |
633 | tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) |
|
635 | tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) | |
634 | tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal) |
|
636 | tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal) | |
635 | tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line, |
|
637 | tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line, | |
636 | ColorsNormal) |
|
638 | ColorsNormal) | |
637 |
|
639 | |||
638 | # now, loop over all records printing context and info |
|
640 | # now, loop over all records printing context and info | |
639 | abspath = os.path.abspath |
|
641 | abspath = os.path.abspath | |
640 | for frame, file, lnum, func, lines, index in records: |
|
642 | for frame, file, lnum, func, lines, index in records: | |
641 | #print '*** record:',file,lnum,func,lines,index # dbg |
|
643 | #print '*** record:',file,lnum,func,lines,index # dbg | |
642 | try: |
|
644 | try: | |
643 | file = file and abspath(file) or '?' |
|
645 | file = file and abspath(file) or '?' | |
644 | except OSError: |
|
646 | except OSError: | |
645 | # if file is '<console>' or something not in the filesystem, |
|
647 | # if file is '<console>' or something not in the filesystem, | |
646 | # the abspath call will throw an OSError. Just ignore it and |
|
648 | # the abspath call will throw an OSError. Just ignore it and | |
647 | # keep the original file string. |
|
649 | # keep the original file string. | |
648 | pass |
|
650 | pass | |
649 | link = tpl_link % file |
|
651 | link = tpl_link % file | |
650 | try: |
|
652 | try: | |
651 | args, varargs, varkw, locals = inspect.getargvalues(frame) |
|
653 | args, varargs, varkw, locals = inspect.getargvalues(frame) | |
652 | except: |
|
654 | except: | |
653 | # This can happen due to a bug in python2.3. We should be |
|
655 | # This can happen due to a bug in python2.3. We should be | |
654 | # able to remove this try/except when 2.4 becomes a |
|
656 | # able to remove this try/except when 2.4 becomes a | |
655 | # requirement. Bug details at http://python.org/sf/1005466 |
|
657 | # requirement. Bug details at http://python.org/sf/1005466 | |
656 | inspect_error() |
|
658 | inspect_error() | |
657 | traceback.print_exc(file=Term.cerr) |
|
659 | traceback.print_exc(file=Term.cerr) | |
658 | info("\nIPython's exception reporting continues...\n") |
|
660 | info("\nIPython's exception reporting continues...\n") | |
659 |
|
661 | |||
660 | if func == '?': |
|
662 | if func == '?': | |
661 | call = '' |
|
663 | call = '' | |
662 | else: |
|
664 | else: | |
663 | # Decide whether to include variable details or not |
|
665 | # Decide whether to include variable details or not | |
664 | var_repr = self.include_vars and eqrepr or nullrepr |
|
666 | var_repr = self.include_vars and eqrepr or nullrepr | |
665 | try: |
|
667 | try: | |
666 | call = tpl_call % (func,inspect.formatargvalues(args, |
|
668 | call = tpl_call % (func,inspect.formatargvalues(args, | |
667 | varargs, varkw, |
|
669 | varargs, varkw, | |
668 | locals,formatvalue=var_repr)) |
|
670 | locals,formatvalue=var_repr)) | |
669 | except KeyError: |
|
671 | except KeyError: | |
670 | # Very odd crash from inspect.formatargvalues(). The |
|
672 | # Very odd crash from inspect.formatargvalues(). The | |
671 | # scenario under which it appeared was a call to |
|
673 | # scenario under which it appeared was a call to | |
672 | # view(array,scale) in NumTut.view.view(), where scale had |
|
674 | # view(array,scale) in NumTut.view.view(), where scale had | |
673 | # been defined as a scalar (it should be a tuple). Somehow |
|
675 | # been defined as a scalar (it should be a tuple). Somehow | |
674 | # inspect messes up resolving the argument list of view() |
|
676 | # inspect messes up resolving the argument list of view() | |
675 | # and barfs out. At some point I should dig into this one |
|
677 | # and barfs out. At some point I should dig into this one | |
676 | # and file a bug report about it. |
|
678 | # and file a bug report about it. | |
677 | inspect_error() |
|
679 | inspect_error() | |
678 | traceback.print_exc(file=Term.cerr) |
|
680 | traceback.print_exc(file=Term.cerr) | |
679 | info("\nIPython's exception reporting continues...\n") |
|
681 | info("\nIPython's exception reporting continues...\n") | |
680 | call = tpl_call_fail % func |
|
682 | call = tpl_call_fail % func | |
681 |
|
683 | |||
682 | # Initialize a list of names on the current line, which the |
|
684 | # Initialize a list of names on the current line, which the | |
683 | # tokenizer below will populate. |
|
685 | # tokenizer below will populate. | |
684 | names = [] |
|
686 | names = [] | |
685 |
|
687 | |||
686 | def tokeneater(token_type, token, start, end, line): |
|
688 | def tokeneater(token_type, token, start, end, line): | |
687 | """Stateful tokeneater which builds dotted names. |
|
689 | """Stateful tokeneater which builds dotted names. | |
688 |
|
690 | |||
689 | The list of names it appends to (from the enclosing scope) can |
|
691 | The list of names it appends to (from the enclosing scope) can | |
690 | contain repeated composite names. This is unavoidable, since |
|
692 | contain repeated composite names. This is unavoidable, since | |
691 | there is no way to disambiguate partial dotted structures until |
|
693 | there is no way to disambiguate partial dotted structures until | |
692 | the full list is known. The caller is responsible for pruning |
|
694 | the full list is known. The caller is responsible for pruning | |
693 | the final list of duplicates before using it.""" |
|
695 | the final list of duplicates before using it.""" | |
694 |
|
696 | |||
695 | # build composite names |
|
697 | # build composite names | |
696 | if token == '.': |
|
698 | if token == '.': | |
697 | try: |
|
699 | try: | |
698 | names[-1] += '.' |
|
700 | names[-1] += '.' | |
699 | # store state so the next token is added for x.y.z names |
|
701 | # store state so the next token is added for x.y.z names | |
700 | tokeneater.name_cont = True |
|
702 | tokeneater.name_cont = True | |
701 | return |
|
703 | return | |
702 | except IndexError: |
|
704 | except IndexError: | |
703 | pass |
|
705 | pass | |
704 | if token_type == tokenize.NAME and token not in keyword.kwlist: |
|
706 | if token_type == tokenize.NAME and token not in keyword.kwlist: | |
705 | if tokeneater.name_cont: |
|
707 | if tokeneater.name_cont: | |
706 | # Dotted names |
|
708 | # Dotted names | |
707 | names[-1] += token |
|
709 | names[-1] += token | |
708 | tokeneater.name_cont = False |
|
710 | tokeneater.name_cont = False | |
709 | else: |
|
711 | else: | |
710 | # Regular new names. We append everything, the caller |
|
712 | # Regular new names. We append everything, the caller | |
711 | # will be responsible for pruning the list later. It's |
|
713 | # will be responsible for pruning the list later. It's | |
712 | # very tricky to try to prune as we go, b/c composite |
|
714 | # very tricky to try to prune as we go, b/c composite | |
713 | # names can fool us. The pruning at the end is easy |
|
715 | # names can fool us. The pruning at the end is easy | |
714 | # to do (or the caller can print a list with repeated |
|
716 | # to do (or the caller can print a list with repeated | |
715 | # names if so desired). |
|
717 | # names if so desired). | |
716 | names.append(token) |
|
718 | names.append(token) | |
717 | elif token_type == tokenize.NEWLINE: |
|
719 | elif token_type == tokenize.NEWLINE: | |
718 | raise IndexError |
|
720 | raise IndexError | |
719 | # we need to store a bit of state in the tokenizer to build |
|
721 | # we need to store a bit of state in the tokenizer to build | |
720 | # dotted names |
|
722 | # dotted names | |
721 | tokeneater.name_cont = False |
|
723 | tokeneater.name_cont = False | |
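A hedged trace of the tokeneater above for one hypothetical source line, showing how dotted names are assembled before the caller prunes duplicates:

    # For the line:  result = obj.attr.method(other)
    #   NAME 'result'          -> names == ['result']
    #   NAME 'obj'             -> names == ['result', 'obj']
    #   OP '.'                 -> names[-1] becomes 'obj.'; name_cont set
    #   NAME 'attr'            -> names[-1] becomes 'obj.attr'
    #   OP '.', NAME 'method'  -> names[-1] becomes 'obj.attr.method'
    #   NAME 'other'           -> names == ['result', 'obj.attr.method', 'other']
    #   NEWLINE                -> IndexError raised to stop the tokenizer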
722 |
|
724 | |||
723 | def linereader(file=file, lnum=[lnum], getline=linecache.getline): |
|
725 | def linereader(file=file, lnum=[lnum], getline=linecache.getline): | |
724 | line = getline(file, lnum[0]) |
|
726 | line = getline(file, lnum[0]) | |
725 | lnum[0] += 1 |
|
727 | lnum[0] += 1 | |
726 | return line |
|
728 | return line | |
727 |
|
729 | |||
728 | # Build the list of names on this line of code where the exception |
|
730 | # Build the list of names on this line of code where the exception | |
729 | # occurred. |
|
731 | # occurred. | |
730 | try: |
|
732 | try: | |
731 | # This builds the names list in-place by capturing it from the |
|
733 | # This builds the names list in-place by capturing it from the | |
732 | # enclosing scope. |
|
734 | # enclosing scope. | |
733 | tokenize.tokenize(linereader, tokeneater) |
|
735 | tokenize.tokenize(linereader, tokeneater) | |
734 | except IndexError: |
|
736 | except IndexError: | |
735 | # signals exit of tokenizer |
|
737 | # signals exit of tokenizer | |
736 | pass |
|
738 | pass | |
737 | except tokenize.TokenError,msg: |
|
739 | except tokenize.TokenError,msg: | |
738 | _m = ("An unexpected error occurred while tokenizing input\n" |
|
740 | _m = ("An unexpected error occurred while tokenizing input\n" | |
739 | "The following traceback may be corrupted or invalid\n" |
|
741 | "The following traceback may be corrupted or invalid\n" | |
740 | "The error message is: %s\n" % msg) |
|
742 | "The error message is: %s\n" % msg) | |
741 | error(_m) |
|
743 | error(_m) | |
742 |
|
744 | |||
743 | # prune names list of duplicates, but keep the right order |
|
745 | # prune names list of duplicates, but keep the right order | |
744 | unique_names = uniq_stable(names) |
|
746 | unique_names = uniq_stable(names) | |
745 |
|
747 | |||
746 | # Start loop over vars |
|
748 | # Start loop over vars | |
747 | lvals = [] |
|
749 | lvals = [] | |
748 | if self.include_vars: |
|
750 | if self.include_vars: | |
749 | for name_full in unique_names: |
|
751 | for name_full in unique_names: | |
750 | name_base = name_full.split('.',1)[0] |
|
752 | name_base = name_full.split('.',1)[0] | |
751 | if name_base in frame.f_code.co_varnames: |
|
753 | if name_base in frame.f_code.co_varnames: | |
752 | if locals.has_key(name_base): |
|
754 | if locals.has_key(name_base): | |
753 | try: |
|
755 | try: | |
754 | value = repr(eval(name_full,locals)) |
|
756 | value = repr(eval(name_full,locals)) | |
755 | except: |
|
757 | except: | |
756 | value = undefined |
|
758 | value = undefined | |
757 | else: |
|
759 | else: | |
758 | value = undefined |
|
760 | value = undefined | |
759 | name = tpl_local_var % name_full |
|
761 | name = tpl_local_var % name_full | |
760 | else: |
|
762 | else: | |
761 | if frame.f_globals.has_key(name_base): |
|
763 | if frame.f_globals.has_key(name_base): | |
762 | try: |
|
764 | try: | |
763 | value = repr(eval(name_full,frame.f_globals)) |
|
765 | value = repr(eval(name_full,frame.f_globals)) | |
764 | except: |
|
766 | except: | |
765 | value = undefined |
|
767 | value = undefined | |
766 | else: |
|
768 | else: | |
767 | value = undefined |
|
769 | value = undefined | |
768 | name = tpl_global_var % name_full |
|
770 | name = tpl_global_var % name_full | |
769 | lvals.append(tpl_name_val % (name,value)) |
|
771 | lvals.append(tpl_name_val % (name,value)) | |
770 | if lvals: |
|
772 | if lvals: | |
771 | lvals = '%s%s' % (indent,em_normal.join(lvals)) |
|
773 | lvals = '%s%s' % (indent,em_normal.join(lvals)) | |
772 | else: |
|
774 | else: | |
773 | lvals = '' |
|
775 | lvals = '' | |
774 |
|
776 | |||
775 | level = '%s %s\n' % (link,call) |
|
777 | level = '%s %s\n' % (link,call) | |
776 |
|
778 | |||
777 | if index is None: |
|
779 | if index is None: | |
778 | frames.append(level) |
|
780 | frames.append(level) | |
779 | else: |
|
781 | else: | |
780 | frames.append('%s%s' % (level,''.join( |
|
782 | frames.append('%s%s' % (level,''.join( | |
781 | _formatTracebackLines(lnum,index,lines,Colors,lvals, |
|
783 | _formatTracebackLines(lnum,index,lines,Colors,lvals, | |
782 | col_scheme)))) |
|
784 | col_scheme)))) | |
783 |
|
785 | |||
784 | # Get (safely) a string form of the exception info |
|
786 | # Get (safely) a string form of the exception info | |
785 | try: |
|
787 | try: | |
786 | etype_str,evalue_str = map(str,(etype,evalue)) |
|
788 | etype_str,evalue_str = map(str,(etype,evalue)) | |
787 | except: |
|
789 | except: | |
788 | # User exception is improperly defined. |
|
790 | # User exception is improperly defined. | |
789 | etype,evalue = str,sys.exc_info()[:2] |
|
791 | etype,evalue = str,sys.exc_info()[:2] | |
790 | etype_str,evalue_str = map(str,(etype,evalue)) |
|
792 | etype_str,evalue_str = map(str,(etype,evalue)) | |
791 | # ... and format it |
|
793 | # ... and format it | |
792 | exception = ['%s%s%s: %s' % (Colors.excName, etype_str, |
|
794 | exception = ['%s%s%s: %s' % (Colors.excName, etype_str, | |
793 | ColorsNormal, evalue_str)] |
|
795 | ColorsNormal, evalue_str)] | |
794 | if type(evalue) is types.InstanceType: |
|
796 | if type(evalue) is types.InstanceType: | |
795 | try: |
|
797 | try: | |
796 | names = [w for w in dir(evalue) if isinstance(w, basestring)] |
|
798 | names = [w for w in dir(evalue) if isinstance(w, basestring)] | |
797 | except: |
|
799 | except: | |
798 | # Every now and then, an object with funny internals blows up |
|
800 | # Every now and then, an object with funny internals blows up | |
799 | # when dir() is called on it. We do the best we can to report |
|
801 | # when dir() is called on it. We do the best we can to report | |
800 | # the problem and continue |
|
802 | # the problem and continue | |
801 | _m = '%sException reporting error (object with broken dir())%s:' |
|
803 | _m = '%sException reporting error (object with broken dir())%s:' | |
802 | exception.append(_m % (Colors.excName,ColorsNormal)) |
|
804 | exception.append(_m % (Colors.excName,ColorsNormal)) | |
803 | etype_str,evalue_str = map(str,sys.exc_info()[:2]) |
|
805 | etype_str,evalue_str = map(str,sys.exc_info()[:2]) | |
804 | exception.append('%s%s%s: %s' % (Colors.excName,etype_str, |
|
806 | exception.append('%s%s%s: %s' % (Colors.excName,etype_str, | |
805 | ColorsNormal, evalue_str)) |
|
807 | ColorsNormal, evalue_str)) | |
806 | names = [] |
|
808 | names = [] | |
807 | for name in names: |
|
809 | for name in names: | |
808 | value = text_repr(getattr(evalue, name)) |
|
810 | value = text_repr(getattr(evalue, name)) | |
809 | exception.append('\n%s%s = %s' % (indent, name, value)) |
|
811 | exception.append('\n%s%s = %s' % (indent, name, value)) | |
810 |
|
812 | |||
811 | # This is being commented out for now as the __IPYTHON__ variable |
|
813 | # This is being commented out for now as the __IPYTHON__ variable | |
812 | # referenced here is not resolved and causes massive test failures |
|
814 | # referenced here is not resolved and causes massive test failures | |
813 | # and errors. B. Granger, 04/2009. |
|
815 | # and errors. B. Granger, 04/2009. XXX | |
814 | # See https://bugs.launchpad.net/bugs/362137 |
|
816 | # See https://bugs.launchpad.net/bugs/362137 | |
815 | # # vds: >> |
|
817 | # # vds: >> | |
816 | # if records: |
|
818 | # if records: | |
817 | # filepath, lnum = records[-1][1:3] |
|
819 | # filepath, lnum = records[-1][1:3] | |
818 | # #print "file:", str(file), "linenb", str(lnum) # dbg |
|
820 | # #print "file:", str(file), "linenb", str(lnum) # dbg | |
819 | # filepath = os.path.abspath(filepath) |
|
821 | # filepath = os.path.abspath(filepath) | |
820 | # __IPYTHON__.hooks.synchronize_with_editor(filepath, lnum, 0) |
|
822 | # __IPYTHON__.hooks.synchronize_with_editor(filepath, lnum, 0) | |
821 | # # vds: << |
|
823 | # # vds: << | |
822 |
|
824 | |||
823 | # return all our info assembled as a single string |
|
825 | # return all our info assembled as a single string | |
824 | return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) ) |
|
826 | return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) ) | |
825 |
|
827 | |||
826 | def debugger(self,force=False): |
|
828 | def debugger(self,force=False): | |
827 | """Call up the pdb debugger if desired, always clean up the tb |
|
829 | """Call up the pdb debugger if desired, always clean up the tb | |
828 | reference. |
|
830 | reference. | |
829 |
|
831 | |||
830 | Keywords: |
|
832 | Keywords: | |
831 |
|
833 | |||
832 | - force(False): by default, this routine checks the instance call_pdb |
|
834 | - force(False): by default, this routine checks the instance call_pdb | |
833 | flag and does not actually invoke the debugger if the flag is false. |
|
835 | flag and does not actually invoke the debugger if the flag is false. | |
834 | The 'force' option forces the debugger to activate even if the flag |
|
836 | The 'force' option forces the debugger to activate even if the flag | |
835 | is false. |
|
837 | is false. | |
836 |
|
838 | |||
837 | If the call_pdb flag is set, the pdb interactive debugger is |
|
839 | If the call_pdb flag is set, the pdb interactive debugger is | |
838 | invoked. In all cases, the self.tb reference to the current traceback |
|
840 | invoked. In all cases, the self.tb reference to the current traceback | |
839 | is deleted to prevent lingering references which hamper memory |
|
841 | is deleted to prevent lingering references which hamper memory | |
840 | management. |
|
842 | management. | |
841 |
|
843 | |||
842 | Note that each call to pdb() does an 'import readline', so if your app |
|
844 | Note that each call to pdb() does an 'import readline', so if your app | |
843 | requires a special setup for the readline completers, you'll have to |
|
845 | requires a special setup for the readline completers, you'll have to | |
844 | fix that by hand after invoking the exception handler.""" |
|
846 | fix that by hand after invoking the exception handler.""" | |
845 |
|
847 | |||
846 | if force or self.call_pdb: |
|
848 | if force or self.call_pdb: | |
847 | if self.pdb is None: |
|
849 | if self.pdb is None: | |
848 | self.pdb = Debugger.Pdb( |
|
850 | self.pdb = Debugger.Pdb( | |
849 | self.color_scheme_table.active_scheme_name) |
|
851 | self.color_scheme_table.active_scheme_name) | |
850 | # the system displayhook may have changed, restore the original |
|
852 | # the system displayhook may have changed, restore the original | |
851 | # for pdb |
|
853 | # for pdb | |
852 | dhook = sys.displayhook |
|
854 | dhook = sys.displayhook | |
853 | sys.displayhook = sys.__displayhook__ |
|
855 | sys.displayhook = sys.__displayhook__ | |
854 | self.pdb.reset() |
|
856 | self.pdb.reset() | |
855 | # Find the right frame so we don't pop up inside ipython itself |
|
857 | # Find the right frame so we don't pop up inside ipython itself | |
856 | if hasattr(self,'tb'): |
|
858 | if hasattr(self,'tb'): | |
857 | etb = self.tb |
|
859 | etb = self.tb | |
858 | else: |
|
860 | else: | |
859 | etb = self.tb = sys.last_traceback |
|
861 | etb = self.tb = sys.last_traceback | |
860 | while self.tb.tb_next is not None: |
|
862 | while self.tb.tb_next is not None: | |
861 | self.tb = self.tb.tb_next |
|
863 | self.tb = self.tb.tb_next | |
862 | try: |
|
864 | try: | |
863 | if etb and etb.tb_next: |
|
865 | if etb and etb.tb_next: | |
864 | etb = etb.tb_next |
|
866 | etb = etb.tb_next | |
865 | self.pdb.botframe = etb.tb_frame |
|
867 | self.pdb.botframe = etb.tb_frame | |
866 | self.pdb.interaction(self.tb.tb_frame, self.tb) |
|
868 | self.pdb.interaction(self.tb.tb_frame, self.tb) | |
867 | finally: |
|
869 | finally: | |
868 | sys.displayhook = dhook |
|
870 | sys.displayhook = dhook | |
869 |
|
871 | |||
870 | if hasattr(self,'tb'): |
|
872 | if hasattr(self,'tb'): | |
871 | del self.tb |
|
873 | del self.tb | |
872 |
|
874 | |||
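The call_pdb / force behaviour described in the debugger() docstring is easiest to see with a small driver. A minimal sketch, assuming this module is importable as IPython.ultratb and that pdb is available; the failing function below is illustrative only:

    import sys
    from IPython import ultratb

    # call_pdb=1 makes the hook drop into pdb right after printing the traceback;
    # calling debugger(force=True) on the instance would do the same with call_pdb=0.
    sys.excepthook = ultratb.VerboseTB(color_scheme='NoColor', call_pdb=1)

    def divide(a, b):
        return a / b

    divide(1, 0)   # prints the verbose traceback, then opens pdb at the failing frame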
873 | def handler(self, info=None): |
|
875 | def handler(self, info=None): | |
874 | (etype, evalue, etb) = info or sys.exc_info() |
|
876 | (etype, evalue, etb) = info or sys.exc_info() | |
875 | self.tb = etb |
|
877 | self.tb = etb | |
876 | Term.cout.flush() |
|
878 | Term.cout.flush() | |
877 | print >> Term.cerr, self.text(etype, evalue, etb) |
|
879 | print >> Term.cerr, self.text(etype, evalue, etb) | |
878 | Term.cerr.flush() |
|
880 | Term.cerr.flush() | |
879 |
|
881 | |||
880 | # Changed so an instance can just be called as VerboseTB_inst() and print |
|
882 | # Changed so an instance can just be called as VerboseTB_inst() and print | |
881 | # out the right info on its own. |
|
883 | # out the right info on its own. | |
882 | def __call__(self, etype=None, evalue=None, etb=None): |
|
884 | def __call__(self, etype=None, evalue=None, etb=None): | |
883 | """This hook can replace sys.excepthook (for Python 2.1 or higher).""" |
|
885 | """This hook can replace sys.excepthook (for Python 2.1 or higher).""" | |
884 | if etb is None: |
|
886 | if etb is None: | |
885 | self.handler() |
|
887 | self.handler() | |
886 | else: |
|
888 | else: | |
887 | self.handler((etype, evalue, etb)) |
|
889 | self.handler((etype, evalue, etb)) | |
888 | try: |
|
890 | try: | |
889 | self.debugger() |
|
891 | self.debugger() | |
890 | except KeyboardInterrupt: |
|
892 | except KeyboardInterrupt: | |
891 | print "\nKeyboardInterrupt" |
|
893 | print "\nKeyboardInterrupt" | |
892 |
|
894 | |||
893 | #---------------------------------------------------------------------------- |
|
895 | #---------------------------------------------------------------------------- | |
894 | class FormattedTB(VerboseTB,ListTB): |
|
896 | class FormattedTB(VerboseTB,ListTB): | |
895 | """Subclass ListTB but allow calling with a traceback. |
|
897 | """Subclass ListTB but allow calling with a traceback. | |
896 |
|
898 | |||
897 | It can thus be used as a sys.excepthook for Python > 2.1. |
|
899 | It can thus be used as a sys.excepthook for Python > 2.1. | |
898 |
|
900 | |||
899 | Also adds 'Context' and 'Verbose' modes, not available in ListTB. |
|
901 | Also adds 'Context' and 'Verbose' modes, not available in ListTB. | |
900 |
|
902 | |||
901 | Allows a tb_offset to be specified. This is useful for situations where |
|
903 | Allows a tb_offset to be specified. This is useful for situations where | |
902 | one needs to remove a number of topmost frames from the traceback (such as |
|
904 | one needs to remove a number of topmost frames from the traceback (such as | |
903 | occurs with python programs that themselves execute other python code, |
|
905 | occurs with python programs that themselves execute other python code, | |
904 | like Python shells). """ |
|
906 | like Python shells). """ | |
905 |
|
907 | |||
906 | def __init__(self, mode = 'Plain', color_scheme='Linux', |
|
908 | def __init__(self, mode = 'Plain', color_scheme='Linux', | |
907 | tb_offset = 0,long_header=0,call_pdb=0,include_vars=0): |
|
909 | tb_offset = 0,long_header=0,call_pdb=0,include_vars=0): | |
908 |
|
910 | |||
909 | # NEVER change the order of this list. Put new modes at the end: |
|
911 | # NEVER change the order of this list. Put new modes at the end: | |
910 | self.valid_modes = ['Plain','Context','Verbose'] |
|
912 | self.valid_modes = ['Plain','Context','Verbose'] | |
911 | self.verbose_modes = self.valid_modes[1:3] |
|
913 | self.verbose_modes = self.valid_modes[1:3] | |
912 |
|
914 | |||
913 | VerboseTB.__init__(self,color_scheme,tb_offset,long_header, |
|
915 | VerboseTB.__init__(self,color_scheme,tb_offset,long_header, | |
914 | call_pdb=call_pdb,include_vars=include_vars) |
|
916 | call_pdb=call_pdb,include_vars=include_vars) | |
915 | self.set_mode(mode) |
|
917 | self.set_mode(mode) | |
916 |
|
918 | |||
917 | def _extract_tb(self,tb): |
|
919 | def _extract_tb(self,tb): | |
918 | if tb: |
|
920 | if tb: | |
919 | return traceback.extract_tb(tb) |
|
921 | return traceback.extract_tb(tb) | |
920 | else: |
|
922 | else: | |
921 | return None |
|
923 | return None | |
922 |
|
924 | |||
923 | def text(self, etype, value, tb,context=5,mode=None): |
|
925 | def text(self, etype, value, tb,context=5,mode=None): | |
924 | """Return formatted traceback. |
|
926 | """Return formatted traceback. | |
925 |
|
927 | |||
926 | If the optional mode parameter is given, it overrides the current |
|
928 | If the optional mode parameter is given, it overrides the current | |
927 | mode.""" |
|
929 | mode.""" | |
928 |
|
930 | |||
929 | if mode is None: |
|
931 | if mode is None: | |
930 | mode = self.mode |
|
932 | mode = self.mode | |
931 | if mode in self.verbose_modes: |
|
933 | if mode in self.verbose_modes: | |
932 | # verbose modes need a full traceback |
|
934 | # verbose modes need a full traceback | |
933 | return VerboseTB.text(self,etype, value, tb,context=5) |
|
935 | return VerboseTB.text(self,etype, value, tb,context=5) | |
934 | else: |
|
936 | else: | |
935 | # We must check the source cache because otherwise we can print |
|
937 | # We must check the source cache because otherwise we can print | |
936 | # out-of-date source code. |
|
938 | # out-of-date source code. | |
937 | linecache.checkcache() |
|
939 | linecache.checkcache() | |
938 | # Now we can extract and format the exception |
|
940 | # Now we can extract and format the exception | |
939 | elist = self._extract_tb(tb) |
|
941 | elist = self._extract_tb(tb) | |
940 | if len(elist) > self.tb_offset: |
|
942 | if len(elist) > self.tb_offset: | |
941 | del elist[:self.tb_offset] |
|
943 | del elist[:self.tb_offset] | |
942 | return ListTB.text(self,etype,value,elist) |
|
944 | return ListTB.text(self,etype,value,elist) | |
943 |
|
945 | |||
944 | def set_mode(self,mode=None): |
|
946 | def set_mode(self,mode=None): | |
945 | """Switch to the desired mode. |
|
947 | """Switch to the desired mode. | |
946 |
|
948 | |||
947 | If mode is not specified, cycles through the available modes.""" |
|
949 | If mode is not specified, cycles through the available modes.""" | |
948 |
|
950 | |||
949 | if not mode: |
|
951 | if not mode: | |
950 | new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \ |
|
952 | new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \ | |
951 | len(self.valid_modes) |
|
953 | len(self.valid_modes) | |
952 | self.mode = self.valid_modes[new_idx] |
|
954 | self.mode = self.valid_modes[new_idx] | |
953 | elif mode not in self.valid_modes: |
|
955 | elif mode not in self.valid_modes: | |
954 | raise ValueError, 'Unrecognized mode in FormattedTB: <'+mode+'>\n'\ |
|
956 | raise ValueError, 'Unrecognized mode in FormattedTB: <'+mode+'>\n'\ | |
955 | 'Valid modes: '+str(self.valid_modes) |
|
957 | 'Valid modes: '+str(self.valid_modes) | |
956 | else: |
|
958 | else: | |
957 | self.mode = mode |
|
959 | self.mode = mode | |
958 | # include variable details only in 'Verbose' mode |
|
960 | # include variable details only in 'Verbose' mode | |
959 | self.include_vars = (self.mode == self.valid_modes[2]) |
|
961 | self.include_vars = (self.mode == self.valid_modes[2]) | |
960 |
|
962 | |||
961 | # some convenient shortcuts |
|
963 | # some convenient shortcuts | |
962 | def plain(self): |
|
964 | def plain(self): | |
963 | self.set_mode(self.valid_modes[0]) |
|
965 | self.set_mode(self.valid_modes[0]) | |
964 |
|
966 | |||
965 | def context(self): |
|
967 | def context(self): | |
966 | self.set_mode(self.valid_modes[1]) |
|
968 | self.set_mode(self.valid_modes[1]) | |
967 |
|
969 | |||
968 | def verbose(self): |
|
970 | def verbose(self): | |
969 | self.set_mode(self.valid_modes[2]) |
|
971 | self.set_mode(self.valid_modes[2]) | |
970 |
|
972 | |||
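For reference, a small sketch of how the mode machinery above is typically driven, assuming IPython.ultratb is importable; only methods defined in this class are used:

    from IPython import ultratb

    ftb = ultratb.FormattedTB(mode='Plain', color_scheme='NoColor')

    ftb.verbose()      # same as ftb.set_mode('Verbose'); also turns on include_vars
    ftb.plain()        # back to the compact ListTB-style output

    # With no argument, set_mode() cycles Plain -> Context -> Verbose -> Plain.
    ftb.set_mode()
    print ftb.mode     # 'Context'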
971 | #---------------------------------------------------------------------------- |
|
973 | #---------------------------------------------------------------------------- | |
972 | class AutoFormattedTB(FormattedTB): |
|
974 | class AutoFormattedTB(FormattedTB): | |
973 | """A traceback printer which can be called on the fly. |
|
975 | """A traceback printer which can be called on the fly. | |
974 |
|
976 | |||
975 | It will find out about exceptions by itself. |
|
977 | It will find out about exceptions by itself. | |
976 |
|
978 | |||
977 | A brief example: |
|
979 | A brief example: | |
978 |
|
980 | |||
979 | AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux') |
|
981 | AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux') | |
980 | try: |
|
982 | try: | |
981 | ... |
|
983 | ... | |
982 | except: |
|
984 | except: | |
983 | AutoTB() # or AutoTB(out=logfile) where logfile is an open file object |
|
985 | AutoTB() # or AutoTB(out=logfile) where logfile is an open file object | |
984 | """ |
|
986 | """ | |
985 | def __call__(self,etype=None,evalue=None,etb=None, |
|
987 | def __call__(self,etype=None,evalue=None,etb=None, | |
986 | out=None,tb_offset=None): |
|
988 | out=None,tb_offset=None): | |
987 | """Print out a formatted exception traceback. |
|
989 | """Print out a formatted exception traceback. | |
988 |
|
990 | |||
989 | Optional arguments: |
|
991 | Optional arguments: | |
990 | - out: an open file-like object to direct output to. |
|
992 | - out: an open file-like object to direct output to. | |
991 |
|
993 | |||
992 | - tb_offset: the number of frames to skip over in the stack, on a |
|
994 | - tb_offset: the number of frames to skip over in the stack, on a | |
993 | per-call basis (this temporarily overrides the instance's tb_offset |
|
995 | per-call basis (this temporarily overrides the instance's tb_offset | |
994 | given at initialization time). """ |
|
996 | given at initialization time). """ | |
995 |
|
997 | |||
996 | if out is None: |
|
998 | if out is None: | |
997 | out = Term.cerr |
|
999 | out = Term.cerr | |
998 | Term.cout.flush() |
|
1000 | Term.cout.flush() | |
999 | if tb_offset is not None: |
|
1001 | if tb_offset is not None: | |
1000 | tb_offset, self.tb_offset = self.tb_offset, tb_offset |
|
1002 | tb_offset, self.tb_offset = self.tb_offset, tb_offset | |
1001 | print >> out, self.text(etype, evalue, etb) |
|
1003 | print >> out, self.text(etype, evalue, etb) | |
1002 | self.tb_offset = tb_offset |
|
1004 | self.tb_offset = tb_offset | |
1003 | else: |
|
1005 | else: | |
1004 | print >> out, self.text(etype, evalue, etb) |
|
1006 | print >> out, self.text(etype, evalue, etb) | |
1005 | out.flush() |
|
1007 | out.flush() | |
1006 | try: |
|
1008 | try: | |
1007 | self.debugger() |
|
1009 | self.debugger() | |
1008 | except KeyboardInterrupt: |
|
1010 | except KeyboardInterrupt: | |
1009 | print "\nKeyboardInterrupt" |
|
1011 | print "\nKeyboardInterrupt" | |
1010 |
|
1012 | |||
1011 | def text(self,etype=None,value=None,tb=None,context=5,mode=None): |
|
1013 | def text(self,etype=None,value=None,tb=None,context=5,mode=None): | |
1012 | if etype is None: |
|
1014 | if etype is None: | |
1013 | etype,value,tb = sys.exc_info() |
|
1015 | etype,value,tb = sys.exc_info() | |
1014 | self.tb = tb |
|
1016 | self.tb = tb | |
1015 | return FormattedTB.text(self,etype,value,tb,context=5,mode=mode) |
|
1017 | return FormattedTB.text(self,etype,value,tb,context=5,mode=mode) | |
1016 |
|
1018 | |||
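A short sketch of the per-call tb_offset override documented in __call__ above; the run_user_code wrapper is illustrative only and assumes IPython.ultratb is importable:

    import sys
    from IPython import ultratb

    auto_tb = ultratb.AutoFormattedTB(mode='Context', color_scheme='NoColor', tb_offset=0)

    def run_user_code(src):
        try:
            exec src in {}
        except:
            # Hide the run_user_code frame for this call only; the instance's
            # tb_offset is restored to 0 afterwards.
            auto_tb(out=sys.stderr, tb_offset=1)

    run_user_code("1/0")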
1017 | #--------------------------------------------------------------------------- |
|
1019 | #--------------------------------------------------------------------------- | |
1018 | # A simple class to preserve Nathan's original functionality. |
|
1020 | # A simple class to preserve Nathan's original functionality. | |
1019 | class ColorTB(FormattedTB): |
|
1021 | class ColorTB(FormattedTB): | |
1020 | """Shorthand to initialize a FormattedTB in Linux colors mode.""" |
|
1022 | """Shorthand to initialize a FormattedTB in Linux colors mode.""" | |
1021 | def __init__(self,color_scheme='Linux',call_pdb=0): |
|
1023 | def __init__(self,color_scheme='Linux',call_pdb=0): | |
1022 | FormattedTB.__init__(self,color_scheme=color_scheme, |
|
1024 | FormattedTB.__init__(self,color_scheme=color_scheme, | |
1023 | call_pdb=call_pdb) |
|
1025 | call_pdb=call_pdb) | |
1024 |
|
1026 | |||
1025 | #---------------------------------------------------------------------------- |
|
1027 | #---------------------------------------------------------------------------- | |
1026 | # module testing (minimal) |
|
1028 | # module testing (minimal) | |
1027 | if __name__ == "__main__": |
|
1029 | if __name__ == "__main__": | |
1028 | def spam(c, (d, e)): |
|
1030 | def spam(c, (d, e)): | |
1029 | x = c + d |
|
1031 | x = c + d | |
1030 | y = c * d |
|
1032 | y = c * d | |
1031 | foo(x, y) |
|
1033 | foo(x, y) | |
1032 |
|
1034 | |||
1033 | def foo(a, b, bar=1): |
|
1035 | def foo(a, b, bar=1): | |
1034 | eggs(a, b + bar) |
|
1036 | eggs(a, b + bar) | |
1035 |
|
1037 | |||
1036 | def eggs(f, g, z=globals()): |
|
1038 | def eggs(f, g, z=globals()): | |
1037 | h = f + g |
|
1039 | h = f + g | |
1038 | i = f - g |
|
1040 | i = f - g | |
1039 | return h / i |
|
1041 | return h / i | |
1040 |
|
1042 | |||
1041 | print '' |
|
1043 | print '' | |
1042 | print '*** Before ***' |
|
1044 | print '*** Before ***' | |
1043 | try: |
|
1045 | try: | |
1044 | print spam(1, (2, 3)) |
|
1046 | print spam(1, (2, 3)) | |
1045 | except: |
|
1047 | except: | |
1046 | traceback.print_exc() |
|
1048 | traceback.print_exc() | |
1047 | print '' |
|
1049 | print '' | |
1048 |
|
1050 | |||
1049 | handler = ColorTB() |
|
1051 | handler = ColorTB() | |
1050 | print '*** ColorTB ***' |
|
1052 | print '*** ColorTB ***' | |
1051 | try: |
|
1053 | try: | |
1052 | print spam(1, (2, 3)) |
|
1054 | print spam(1, (2, 3)) | |
1053 | except: |
|
1055 | except: | |
1054 | apply(handler, sys.exc_info() ) |
|
1056 | apply(handler, sys.exc_info() ) | |
1055 | print '' |
|
1057 | print '' | |
1056 |
|
1058 | |||
1057 | handler = VerboseTB() |
|
1059 | handler = VerboseTB() | |
1058 | print '*** VerboseTB ***' |
|
1060 | print '*** VerboseTB ***' | |
1059 | try: |
|
1061 | try: | |
1060 | print spam(1, (2, 3)) |
|
1062 | print spam(1, (2, 3)) | |
1061 | except: |
|
1063 | except: | |
1062 | apply(handler, sys.exc_info() ) |
|
1064 | apply(handler, sys.exc_info() ) | |
1063 | print '' |
|
1065 | print '' | |
1064 |
|
1066 |
@@ -1,814 +1,825 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | """Start an IPython cluster = (controller + engines).""" |
|
4 | """Start an IPython cluster = (controller + engines).""" | |
5 |
|
5 | |||
6 | #----------------------------------------------------------------------------- |
|
6 | #----------------------------------------------------------------------------- | |
7 | # Copyright (C) 2008 The IPython Development Team |
|
7 | # Copyright (C) 2008 The IPython Development Team | |
8 | # |
|
8 | # | |
9 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
10 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | import os |
|
17 | import os | |
18 | import re |
|
18 | import re | |
19 | import sys |
|
19 | import sys | |
20 | import signal |
|
20 | import signal | |
21 | import tempfile |
|
21 | import tempfile | |
22 | pjoin = os.path.join |
|
22 | pjoin = os.path.join | |
23 |
|
23 | |||
24 | from twisted.internet import reactor, defer |
|
24 | from twisted.internet import reactor, defer | |
25 | from twisted.internet.protocol import ProcessProtocol |
|
25 | from twisted.internet.protocol import ProcessProtocol | |
26 | from twisted.internet.error import ProcessDone, ProcessTerminated |
|
26 | from twisted.internet.error import ProcessDone, ProcessTerminated | |
27 | from twisted.internet.utils import getProcessOutput |
|
27 | from twisted.internet.utils import getProcessOutput | |
28 | from twisted.python import failure, log |
|
28 | from twisted.python import failure, log | |
29 |
|
29 | |||
30 | from IPython.external import argparse |
|
30 | from IPython.external import argparse | |
31 | from IPython.external import Itpl |
|
31 | from IPython.external import Itpl | |
32 | from IPython.genutils import get_ipython_dir, get_log_dir, get_security_dir |
|
32 | from IPython.genutils import ( | |
33 | from IPython.genutils import num_cpus |
|
33 | get_ipython_dir, | |
|
34 | get_log_dir, | |||
|
35 | get_security_dir, | |||
|
36 | num_cpus | |||
|
37 | ) | |||
34 | from IPython.kernel.fcutil import have_crypto |
|
38 | from IPython.kernel.fcutil import have_crypto | |
35 |
|
39 | |||
36 | # Create various ipython directories if they don't exist. |
|
40 | # Create various ipython directories if they don't exist. | |
37 | # This must be done before IPython.kernel.config is imported. |
|
41 | # This must be done before IPython.kernel.config is imported. | |
38 | from IPython.iplib import user_setup |
|
42 | from IPython.iplib import user_setup | |
39 | if os.name == 'posix': |
|
43 | if os.name == 'posix': | |
40 | rc_suffix = '' |
|
44 | rc_suffix = '' | |
41 | else: |
|
45 | else: | |
42 | rc_suffix = '.ini' |
|
46 | rc_suffix = '.ini' | |
43 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) |
|
47 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) | |
44 | get_log_dir() |
|
48 | get_log_dir() | |
45 | get_security_dir() |
|
49 | get_security_dir() | |
46 |
|
50 | |||
47 | from IPython.kernel.config import config_manager as kernel_config_manager |
|
51 | from IPython.kernel.config import config_manager as kernel_config_manager | |
48 | from IPython.kernel.error import SecurityError, FileTimeoutError |
|
52 | from IPython.kernel.error import SecurityError, FileTimeoutError | |
49 | from IPython.kernel.fcutil import have_crypto |
|
53 | from IPython.kernel.fcutil import have_crypto | |
50 | from IPython.kernel.twistedutil import gatherBoth, wait_for_file |
|
54 | from IPython.kernel.twistedutil import gatherBoth, wait_for_file | |
51 | from IPython.kernel.util import printer |
|
55 | from IPython.kernel.util import printer | |
52 |
|
56 | |||
53 |
|
57 | |||
54 | #----------------------------------------------------------------------------- |
|
58 | #----------------------------------------------------------------------------- | |
55 | # General process handling code |
|
59 | # General process handling code | |
56 | #----------------------------------------------------------------------------- |
|
60 | #----------------------------------------------------------------------------- | |
57 |
|
61 | |||
58 | def find_exe(cmd): |
|
62 | def find_exe(cmd): | |
59 | try: |
|
63 | try: | |
60 | import win32api |
|
64 | import win32api | |
61 | except ImportError: |
|
65 | except ImportError: | |
62 | raise ImportError('you need to have pywin32 installed for this to work') |
|
66 | raise ImportError('you need to have pywin32 installed for this to work') | |
63 | else: |
|
67 | else: | |
64 | try: |
|
68 | try: | |
65 | (path, offset) = win32api.SearchPath(os.environ['PATH'],cmd + '.exe') |
|
69 | (path, offset) = win32api.SearchPath(os.environ['PATH'],cmd + '.exe') | |
66 | except: |
|
70 | except: | |
67 | (path, offset) = win32api.SearchPath(os.environ['PATH'],cmd + '.bat') |
|
71 | (path, offset) = win32api.SearchPath(os.environ['PATH'],cmd + '.bat') | |
68 | return path |
|
72 | return path | |
69 |
|
73 | |||
70 | class ProcessStateError(Exception): |
|
74 | class ProcessStateError(Exception): | |
71 | pass |
|
75 | pass | |
72 |
|
76 | |||
73 | class UnknownStatus(Exception): |
|
77 | class UnknownStatus(Exception): | |
74 | pass |
|
78 | pass | |
75 |
|
79 | |||
76 | class LauncherProcessProtocol(ProcessProtocol): |
|
80 | class LauncherProcessProtocol(ProcessProtocol): | |
77 | """ |
|
81 | """ | |
78 | A ProcessProtocol to go with the ProcessLauncher. |
|
82 | A ProcessProtocol to go with the ProcessLauncher. | |
79 | """ |
|
83 | """ | |
80 | def __init__(self, process_launcher): |
|
84 | def __init__(self, process_launcher): | |
81 | self.process_launcher = process_launcher |
|
85 | self.process_launcher = process_launcher | |
82 |
|
86 | |||
83 | def connectionMade(self): |
|
87 | def connectionMade(self): | |
84 | self.process_launcher.fire_start_deferred(self.transport.pid) |
|
88 | self.process_launcher.fire_start_deferred(self.transport.pid) | |
85 |
|
89 | |||
86 | def processEnded(self, status): |
|
90 | def processEnded(self, status): | |
87 | value = status.value |
|
91 | value = status.value | |
88 | if isinstance(value, ProcessDone): |
|
92 | if isinstance(value, ProcessDone): | |
89 | self.process_launcher.fire_stop_deferred(0) |
|
93 | self.process_launcher.fire_stop_deferred(0) | |
90 | elif isinstance(value, ProcessTerminated): |
|
94 | elif isinstance(value, ProcessTerminated): | |
91 | self.process_launcher.fire_stop_deferred( |
|
95 | self.process_launcher.fire_stop_deferred( | |
92 | {'exit_code':value.exitCode, |
|
96 | {'exit_code':value.exitCode, | |
93 | 'signal':value.signal, |
|
97 | 'signal':value.signal, | |
94 | 'status':value.status |
|
98 | 'status':value.status | |
95 | } |
|
99 | } | |
96 | ) |
|
100 | ) | |
97 | else: |
|
101 | else: | |
98 | raise UnknownStatus("unknown exit status, this is probably a bug in Twisted") |
|
102 | raise UnknownStatus("unknown exit status, this is probably a bug in Twisted") | |
99 |
|
103 | |||
100 | def outReceived(self, data): |
|
104 | def outReceived(self, data): | |
101 | log.msg(data) |
|
105 | log.msg(data) | |
102 |
|
106 | |||
103 | def errReceived(self, data): |
|
107 | def errReceived(self, data): | |
104 | log.err(data) |
|
108 | log.err(data) | |
105 |
|
109 | |||
106 | class ProcessLauncher(object): |
|
110 | class ProcessLauncher(object): | |
107 | """ |
|
111 | """ | |
108 | Start and stop an external process in an asynchronous manner. |
|
112 | Start and stop an external process in an asynchronous manner. | |
109 |
|
113 | |||
110 | Currently this uses deferreds to notify other parties of process state |
|
114 | Currently this uses deferreds to notify other parties of process state | |
111 | changes. This is an awkward design and should be moved to using |
|
115 | changes. This is an awkward design and should be moved to using | |
112 | a formal NotificationCenter. |
|
116 | a formal NotificationCenter. | |
113 | """ |
|
117 | """ | |
114 | def __init__(self, cmd_and_args): |
|
118 | def __init__(self, cmd_and_args): | |
115 | self.cmd = cmd_and_args[0] |
|
119 | self.cmd = cmd_and_args[0] | |
116 | self.args = cmd_and_args |
|
120 | self.args = cmd_and_args | |
117 | self._reset() |
|
121 | self._reset() | |
118 |
|
122 | |||
119 | def _reset(self): |
|
123 | def _reset(self): | |
120 | self.process_protocol = None |
|
124 | self.process_protocol = None | |
121 | self.pid = None |
|
125 | self.pid = None | |
122 | self.start_deferred = None |
|
126 | self.start_deferred = None | |
123 | self.stop_deferreds = [] |
|
127 | self.stop_deferreds = [] | |
124 | self.state = 'before' # before, running, or after |
|
128 | self.state = 'before' # before, running, or after | |
125 |
|
129 | |||
126 | @property |
|
130 | @property | |
127 | def running(self): |
|
131 | def running(self): | |
128 | if self.state == 'running': |
|
132 | if self.state == 'running': | |
129 | return True |
|
133 | return True | |
130 | else: |
|
134 | else: | |
131 | return False |
|
135 | return False | |
132 |
|
136 | |||
133 | def fire_start_deferred(self, pid): |
|
137 | def fire_start_deferred(self, pid): | |
134 | self.pid = pid |
|
138 | self.pid = pid | |
135 | self.state = 'running' |
|
139 | self.state = 'running' | |
136 | log.msg('Process %r has started with pid=%i' % (self.args, pid)) |
|
140 | log.msg('Process %r has started with pid=%i' % (self.args, pid)) | |
137 | self.start_deferred.callback(pid) |
|
141 | self.start_deferred.callback(pid) | |
138 |
|
142 | |||
139 | def start(self): |
|
143 | def start(self): | |
140 | if self.state == 'before': |
|
144 | if self.state == 'before': | |
141 | self.process_protocol = LauncherProcessProtocol(self) |
|
145 | self.process_protocol = LauncherProcessProtocol(self) | |
142 | self.start_deferred = defer.Deferred() |
|
146 | self.start_deferred = defer.Deferred() | |
143 | self.process_transport = reactor.spawnProcess( |
|
147 | self.process_transport = reactor.spawnProcess( | |
144 | self.process_protocol, |
|
148 | self.process_protocol, | |
145 | self.cmd, |
|
149 | self.cmd, | |
146 | self.args, |
|
150 | self.args, | |
147 | env=os.environ |
|
151 | env=os.environ | |
148 | ) |
|
152 | ) | |
149 | return self.start_deferred |
|
153 | return self.start_deferred | |
150 | else: |
|
154 | else: | |
151 | s = 'the process has already been started and has state: %r' % \ |
|
155 | s = 'the process has already been started and has state: %r' % \ | |
152 | self.state |
|
156 | self.state | |
153 | return defer.fail(ProcessStateError(s)) |
|
157 | return defer.fail(ProcessStateError(s)) | |
154 |
|
158 | |||
155 | def get_stop_deferred(self): |
|
159 | def get_stop_deferred(self): | |
156 | if self.state == 'running' or self.state == 'before': |
|
160 | if self.state == 'running' or self.state == 'before': | |
157 | d = defer.Deferred() |
|
161 | d = defer.Deferred() | |
158 | self.stop_deferreds.append(d) |
|
162 | self.stop_deferreds.append(d) | |
159 | return d |
|
163 | return d | |
160 | else: |
|
164 | else: | |
161 | s = 'this process is already complete' |
|
165 | s = 'this process is already complete' | |
162 | return defer.fail(ProcessStateError(s)) |
|
166 | return defer.fail(ProcessStateError(s)) | |
163 |
|
167 | |||
164 | def fire_stop_deferred(self, exit_code): |
|
168 | def fire_stop_deferred(self, exit_code): | |
165 | log.msg('Process %r has stopped with %r' % (self.args, exit_code)) |
|
169 | log.msg('Process %r has stopped with %r' % (self.args, exit_code)) | |
166 | self.state = 'after' |
|
170 | self.state = 'after' | |
167 | for d in self.stop_deferreds: |
|
171 | for d in self.stop_deferreds: | |
168 | d.callback(exit_code) |
|
172 | d.callback(exit_code) | |
169 |
|
173 | |||
170 | def signal(self, sig): |
|
174 | def signal(self, sig): | |
171 | """ |
|
175 | """ | |
172 | Send a signal to the process. |
|
176 | Send a signal to the process. | |
173 |
|
177 | |||
174 | The argument sig can be ('KILL','INT', etc.) or any signal number. |
|
178 | The argument sig can be ('KILL','INT', etc.) or any signal number. | |
175 | """ |
|
179 | """ | |
176 | if self.state == 'running': |
|
180 | if self.state == 'running': | |
177 | self.process_transport.signalProcess(sig) |
|
181 | self.process_transport.signalProcess(sig) | |
178 |
|
182 | |||
179 | # def __del__(self): |
|
183 | # def __del__(self): | |
180 | # self.signal('KILL') |
|
184 | # self.signal('KILL') | |
181 |
|
185 | |||
182 | def interrupt_then_kill(self, delay=1.0): |
|
186 | def interrupt_then_kill(self, delay=1.0): | |
183 | self.signal('INT') |
|
187 | self.signal('INT') | |
184 | reactor.callLater(delay, self.signal, 'KILL') |
|
188 | reactor.callLater(delay, self.signal, 'KILL') | |
185 |
|
189 | |||
186 |
|
190 | |||
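The deferred-based life cycle described in the ProcessLauncher docstring looks roughly like this in use. A minimal sketch, assuming a POSIX system with 'sleep' on the PATH and that it is run inside this module (or with ProcessLauncher imported from it):

    import sys
    from twisted.internet import reactor
    from twisted.python import log

    log.startLogging(sys.stdout)

    # cmd_and_args is the full argv; argv[0] doubles as the executable name.
    launcher = ProcessLauncher(['sleep', '10'])

    def on_stopped(exit_code):
        print 'process finished with', exit_code
        reactor.stop()

    def on_started(pid):
        launcher.get_stop_deferred().addCallback(on_stopped)
        launcher.interrupt_then_kill(delay=1.0)   # SIGINT now, SIGKILL one second later

    launcher.start().addCallback(on_started)
    reactor.run()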
187 | #----------------------------------------------------------------------------- |
|
191 | #----------------------------------------------------------------------------- | |
188 | # Code for launching controller and engines |
|
192 | # Code for launching controller and engines | |
189 | #----------------------------------------------------------------------------- |
|
193 | #----------------------------------------------------------------------------- | |
190 |
|
194 | |||
191 |
|
195 | |||
192 | class ControllerLauncher(ProcessLauncher): |
|
196 | class ControllerLauncher(ProcessLauncher): | |
193 |
|
197 | |||
194 | def __init__(self, extra_args=None): |
|
198 | def __init__(self, extra_args=None): | |
195 | if sys.platform == 'win32': |
|
199 | if sys.platform == 'win32': | |
196 | # This logic is needed because the ipcontroller script doesn't |
|
200 | # This logic is needed because the ipcontroller script doesn't | |
197 | # always get installed in the same way or in the same location. |
|
201 | # always get installed in the same way or in the same location. | |
198 | from IPython.kernel.scripts import ipcontroller |
|
202 | from IPython.kernel.scripts import ipcontroller | |
199 | script_location = ipcontroller.__file__.replace('.pyc', '.py') |
|
203 | script_location = ipcontroller.__file__.replace('.pyc', '.py') | |
200 | # The -u option here turns on unbuffered output, which is required |
|
204 | # The -u option here turns on unbuffered output, which is required | |
201 | # on Win32 to prevent weird conflicts and problems with Twisted. |
|
205 | # on Win32 to prevent weird conflicts and problems with Twisted. | |
202 | # Also, use sys.executable to make sure we are picking up the |
|
206 | # Also, use sys.executable to make sure we are picking up the | |
203 | # right python exe. |
|
207 | # right python exe. | |
204 | args = [sys.executable, '-u', script_location] |
|
208 | args = [sys.executable, '-u', script_location] | |
205 | else: |
|
209 | else: | |
206 | args = ['ipcontroller'] |
|
210 | args = ['ipcontroller'] | |
207 | self.extra_args = extra_args |
|
211 | self.extra_args = extra_args | |
208 | if extra_args is not None: |
|
212 | if extra_args is not None: | |
209 | args.extend(extra_args) |
|
213 | args.extend(extra_args) | |
210 |
|
214 | |||
211 | ProcessLauncher.__init__(self, args) |
|
215 | ProcessLauncher.__init__(self, args) | |
212 |
|
216 | |||
213 |
|
217 | |||
214 | class EngineLauncher(ProcessLauncher): |
|
218 | class EngineLauncher(ProcessLauncher): | |
215 |
|
219 | |||
216 | def __init__(self, extra_args=None): |
|
220 | def __init__(self, extra_args=None): | |
217 | if sys.platform == 'win32': |
|
221 | if sys.platform == 'win32': | |
218 | # This logic is needed because the ipengine script doesn't |
|
222 | # This logic is needed because the ipengine script doesn't | |
219 | # always get installed in the same way or in the same location. |
|
223 | # always get installed in the same way or in the same location. | |
220 | from IPython.kernel.scripts import ipengine |
|
224 | from IPython.kernel.scripts import ipengine | |
221 | script_location = ipengine.__file__.replace('.pyc', '.py') |
|
225 | script_location = ipengine.__file__.replace('.pyc', '.py') | |
222 | # The -u option here turns on unbuffered output, which is required |
|
226 | # The -u option here turns on unbuffered output, which is required | |
223 | # on Win32 to prevent weird conflicts and problems with Twisted. |
|
227 | # on Win32 to prevent weird conflicts and problems with Twisted. | |
224 | # Also, use sys.executable to make sure we are picking up the |
|
228 | # Also, use sys.executable to make sure we are picking up the | |
225 | # right python exe. |
|
229 | # right python exe. | |
226 | args = [sys.executable, '-u', script_location] |
|
230 | args = [sys.executable, '-u', script_location] | |
227 | else: |
|
231 | else: | |
228 | args = ['ipengine'] |
|
232 | args = ['ipengine'] | |
229 | self.extra_args = extra_args |
|
233 | self.extra_args = extra_args | |
230 | if extra_args is not None: |
|
234 | if extra_args is not None: | |
231 | args.extend(extra_args) |
|
235 | args.extend(extra_args) | |
232 |
|
236 | |||
233 | ProcessLauncher.__init__(self, args) |
|
237 | ProcessLauncher.__init__(self, args) | |
234 |
|
238 | |||
235 |
|
239 | |||
236 | class LocalEngineSet(object): |
|
240 | class LocalEngineSet(object): | |
237 |
|
241 | |||
238 | def __init__(self, extra_args=None): |
|
242 | def __init__(self, extra_args=None): | |
239 | self.extra_args = extra_args |
|
243 | self.extra_args = extra_args | |
240 | self.launchers = [] |
|
244 | self.launchers = [] | |
241 |
|
245 | |||
242 | def start(self, n): |
|
246 | def start(self, n): | |
243 | dlist = [] |
|
247 | dlist = [] | |
244 | for i in range(n): |
|
248 | for i in range(n): | |
245 | el = EngineLauncher(extra_args=self.extra_args) |
|
249 | el = EngineLauncher(extra_args=self.extra_args) | |
246 | d = el.start() |
|
250 | d = el.start() | |
247 | self.launchers.append(el) |
|
251 | self.launchers.append(el) | |
248 | dlist.append(d) |
|
252 | dlist.append(d) | |
249 | dfinal = gatherBoth(dlist, consumeErrors=True) |
|
253 | dfinal = gatherBoth(dlist, consumeErrors=True) | |
250 | dfinal.addCallback(self._handle_start) |
|
254 | dfinal.addCallback(self._handle_start) | |
251 | return dfinal |
|
255 | return dfinal | |
252 |
|
256 | |||
253 | def _handle_start(self, r): |
|
257 | def _handle_start(self, r): | |
254 | log.msg('Engines started with pids: %r' % r) |
|
258 | log.msg('Engines started with pids: %r' % r) | |
255 | return r |
|
259 | return r | |
256 |
|
260 | |||
257 | def _handle_stop(self, r): |
|
261 | def _handle_stop(self, r): | |
258 | log.msg('Engines received signal: %r' % r) |
|
262 | log.msg('Engines received signal: %r' % r) | |
259 | return r |
|
263 | return r | |
260 |
|
264 | |||
261 | def signal(self, sig): |
|
265 | def signal(self, sig): | |
262 | dlist = [] |
|
266 | dlist = [] | |
263 | for el in self.launchers: |
|
267 | for el in self.launchers: | |
264 | d = el.get_stop_deferred() |
|
268 | d = el.get_stop_deferred() | |
265 | dlist.append(d) |
|
269 | dlist.append(d) | |
266 | el.signal(sig) |
|
270 | el.signal(sig) | |
267 | dfinal = gatherBoth(dlist, consumeErrors=True) |
|
271 | dfinal = gatherBoth(dlist, consumeErrors=True) | |
268 | dfinal.addCallback(self._handle_stop) |
|
272 | dfinal.addCallback(self._handle_stop) | |
269 | return dfinal |
|
273 | return dfinal | |
270 |
|
274 | |||
271 | def interrupt_then_kill(self, delay=1.0): |
|
275 | def interrupt_then_kill(self, delay=1.0): | |
272 | dlist = [] |
|
276 | dlist = [] | |
273 | for el in self.launchers: |
|
277 | for el in self.launchers: | |
274 | d = el.get_stop_deferred() |
|
278 | d = el.get_stop_deferred() | |
275 | dlist.append(d) |
|
279 | dlist.append(d) | |
276 | el.interrupt_then_kill(delay) |
|
280 | el.interrupt_then_kill(delay) | |
277 | dfinal = gatherBoth(dlist, consumeErrors=True) |
|
281 | dfinal = gatherBoth(dlist, consumeErrors=True) | |
278 | dfinal.addCallback(self._handle_stop) |
|
282 | dfinal.addCallback(self._handle_stop) | |
279 | return dfinal |
|
283 | return dfinal | |
280 |
|
284 | |||
281 |
|
285 | |||
282 | class BatchEngineSet(object): |
|
286 | class BatchEngineSet(object): | |
283 |
|
287 | |||
284 | # Subclasses must fill these in. See PBSEngineSet |
|
288 | # Subclasses must fill these in. See PBSEngineSet | |
285 | submit_command = '' |
|
289 | submit_command = '' | |
286 | delete_command = '' |
|
290 | delete_command = '' | |
287 | job_id_regexp = '' |
|
291 | job_id_regexp = '' | |
288 |
|
292 | |||
289 | def __init__(self, template_file, **kwargs): |
|
293 | def __init__(self, template_file, **kwargs): | |
290 | self.template_file = template_file |
|
294 | self.template_file = template_file | |
291 | self.context = {} |
|
295 | self.context = {} | |
292 | self.context.update(kwargs) |
|
296 | self.context.update(kwargs) | |
293 | self.batch_file = self.template_file+'-run' |
|
297 | self.batch_file = self.template_file+'-run' | |
294 |
|
298 | |||
295 | def parse_job_id(self, output): |
|
299 | def parse_job_id(self, output): | |
296 | m = re.match(self.job_id_regexp, output) |
|
300 | m = re.match(self.job_id_regexp, output) | |
297 | if m is not None: |
|
301 | if m is not None: | |
298 | job_id = m.group() |
|
302 | job_id = m.group() | |
299 | else: |
|
303 | else: | |
300 | raise Exception("job id couldn't be determined: %s" % output) |
|
304 | raise Exception("job id couldn't be determined: %s" % output) | |
301 | self.job_id = job_id |
|
305 | self.job_id = job_id | |
302 | log.msg('Job started with job id: %r' % job_id) |
|
306 | log.msg('Job started with job id: %r' % job_id) | |
303 | return job_id |
|
307 | return job_id | |
304 |
|
308 | |||
305 | def write_batch_script(self, n): |
|
309 | def write_batch_script(self, n): | |
306 | self.context['n'] = n |
|
310 | self.context['n'] = n | |
307 | template = open(self.template_file, 'r').read() |
|
311 | template = open(self.template_file, 'r').read() | |
308 | log.msg('Using template for batch script: %s' % self.template_file) |
|
312 | log.msg('Using template for batch script: %s' % self.template_file) | |
309 | script_as_string = Itpl.itplns(template, self.context) |
|
313 | script_as_string = Itpl.itplns(template, self.context) | |
310 | log.msg('Writing instantiated batch script: %s' % self.batch_file) |
|
314 | log.msg('Writing instantiated batch script: %s' % self.batch_file) | |
311 | f = open(self.batch_file,'w') |
|
315 | f = open(self.batch_file,'w') | |
312 | f.write(script_as_string) |
|
316 | f.write(script_as_string) | |
313 | f.close() |
|
317 | f.close() | |
314 |
|
318 | |||
315 | def handle_error(self, f): |
|
319 | def handle_error(self, f): | |
316 | f.printTraceback() |
|
320 | f.printTraceback() | |
317 | f.raiseException() |
|
321 | f.raiseException() | |
318 |
|
322 | |||
319 | def start(self, n): |
|
323 | def start(self, n): | |
320 | self.write_batch_script(n) |
|
324 | self.write_batch_script(n) | |
321 | d = getProcessOutput(self.submit_command, |
|
325 | d = getProcessOutput(self.submit_command, | |
322 | [self.batch_file],env=os.environ) |
|
326 | [self.batch_file],env=os.environ) | |
323 | d.addCallback(self.parse_job_id) |
|
327 | d.addCallback(self.parse_job_id) | |
324 | d.addErrback(self.handle_error) |
|
328 | d.addErrback(self.handle_error) | |
325 | return d |
|
329 | return d | |
326 |
|
330 | |||
327 | def kill(self): |
|
331 | def kill(self): | |
328 | d = getProcessOutput(self.delete_command, |
|
332 | d = getProcessOutput(self.delete_command, | |
329 | [self.job_id],env=os.environ) |
|
333 | [self.job_id],env=os.environ) | |
330 | return d |
|
334 | return d | |
331 |
|
335 | |||
332 | class PBSEngineSet(BatchEngineSet): |
|
336 | class PBSEngineSet(BatchEngineSet): | |
333 |
|
337 | |||
334 | submit_command = 'qsub' |
|
338 | submit_command = 'qsub' | |
335 | delete_command = 'qdel' |
|
339 | delete_command = 'qdel' | |
336 | job_id_regexp = '\d+' |
|
340 | job_id_regexp = '\d+' | |
337 |
|
341 | |||
338 | def __init__(self, template_file, **kwargs): |
|
342 | def __init__(self, template_file, **kwargs): | |
339 | BatchEngineSet.__init__(self, template_file, **kwargs) |
|
343 | BatchEngineSet.__init__(self, template_file, **kwargs) | |
340 |
|
344 | |||
341 |
|
345 | |||
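The batch classes above run the template through Itpl before submitting, so an engine-count placeholder is written in $-style syntax. A minimal sketch, assuming Itpl's ${name} substitution, a hypothetical template file name, and that only 'n' is needed from the context:

    # Hypothetical PBS template; ${n} is filled in by write_batch_script().
    template = "#PBS -N ipython-engines\n#PBS -l nodes=${n}:ppn=1\nmpiexec -n ${n} ipengine\n"
    open('pbs.template', 'w').write(template)

    pbs = PBSEngineSet('pbs.template')
    pbs.write_batch_script(8)   # writes 'pbs.template-run' with ${n} replaced by 8
    # pbs.start(8) would additionally qsub the generated file and parse the job id.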
342 | sshx_template="""#!/bin/sh |
|
346 | sshx_template="""#!/bin/sh | |
343 | "$@" &> /dev/null & |
|
347 | "$@" &> /dev/null & | |
344 | echo $! |
|
348 | echo $! | |
345 | """ |
|
349 | """ | |
346 |
|
350 | |||
347 | engine_killer_template="""#!/bin/sh |
|
351 | engine_killer_template="""#!/bin/sh | |
348 | ps -fu `whoami` | grep '[i]pengine' | awk '{print $2}' | xargs kill -TERM |
|
352 | ps -fu `whoami` | grep '[i]pengine' | awk '{print $2}' | xargs kill -TERM | |
349 | """ |
|
353 | """ | |
350 |
|
354 | |||
351 | class SSHEngineSet(object): |
|
355 | class SSHEngineSet(object): | |
352 | sshx_template=sshx_template |
|
356 | sshx_template=sshx_template | |
353 | engine_killer_template=engine_killer_template |
|
357 | engine_killer_template=engine_killer_template | |
354 |
|
358 | |||
355 | def __init__(self, engine_hosts, sshx=None, ipengine="ipengine"): |
|
359 | def __init__(self, engine_hosts, sshx=None, ipengine="ipengine"): | |
356 | """Start a controller on localhost and engines using ssh. |
|
360 | """Start a controller on localhost and engines using ssh. | |
357 |
|
361 | |||
358 | The engine_hosts argument is a dict with hostnames as keys and |
|
362 | The engine_hosts argument is a dict with hostnames as keys and | |
359 | the number of engines (int) as values. sshx is the name of a local |
|
363 | the number of engines (int) as values. sshx is the name of a local | |
360 | file that will be used to run remote commands. This file is used |
|
364 | file that will be used to run remote commands. This file is used | |
361 | to set up the environment properly. |
|
365 | to set up the environment properly. | |
362 | """ |
|
366 | """ | |
363 |
|
367 | |||
364 | self.temp_dir = tempfile.gettempdir() |
|
368 | self.temp_dir = tempfile.gettempdir() | |
365 | if sshx is not None: |
|
369 | if sshx is not None: | |
366 | self.sshx = sshx |
|
370 | self.sshx = sshx | |
367 | else: |
|
371 | else: | |
368 | # Write the sshx.sh file locally from our template. |
|
372 | # Write the sshx.sh file locally from our template. | |
369 | self.sshx = os.path.join( |
|
373 | self.sshx = os.path.join( | |
370 | self.temp_dir, |
|
374 | self.temp_dir, | |
371 | '%s-main-sshx.sh' % os.environ['USER'] |
|
375 | '%s-main-sshx.sh' % os.environ['USER'] | |
372 | ) |
|
376 | ) | |
373 | f = open(self.sshx, 'w') |
|
377 | f = open(self.sshx, 'w') | |
374 | f.writelines(self.sshx_template) |
|
378 | f.writelines(self.sshx_template) | |
375 | f.close() |
|
379 | f.close() | |
376 | self.engine_command = ipengine |
|
380 | self.engine_command = ipengine | |
377 | self.engine_hosts = engine_hosts |
|
381 | self.engine_hosts = engine_hosts | |
378 | # Write the engine killer script file locally from our template. |
|
382 | # Write the engine killer script file locally from our template. | |
379 | self.engine_killer = os.path.join( |
|
383 | self.engine_killer = os.path.join( | |
380 | self.temp_dir, |
|
384 | self.temp_dir, | |
381 | '%s-local-engine_killer.sh' % os.environ['USER'] |
|
385 | '%s-local-engine_killer.sh' % os.environ['USER'] | |
382 | ) |
|
386 | ) | |
383 | f = open(self.engine_killer, 'w') |
|
387 | f = open(self.engine_killer, 'w') | |
384 | f.writelines(self.engine_killer_template) |
|
388 | f.writelines(self.engine_killer_template) | |
385 | f.close() |
|
389 | f.close() | |
386 |
|
390 | |||
387 | def start(self, send_furl=False): |
|
391 | def start(self, send_furl=False): | |
388 | dlist = [] |
|
392 | dlist = [] | |
389 | for host in self.engine_hosts.keys(): |
|
393 | for host in self.engine_hosts.keys(): | |
390 | count = self.engine_hosts[host] |
|
394 | count = self.engine_hosts[host] | |
391 | d = self._start(host, count, send_furl) |
|
395 | d = self._start(host, count, send_furl) | |
392 | dlist.append(d) |
|
396 | dlist.append(d) | |
393 | return gatherBoth(dlist, consumeErrors=True) |
|
397 | return gatherBoth(dlist, consumeErrors=True) | |
394 |
|
398 | |||
395 | def _start(self, hostname, count=1, send_furl=False): |
|
399 | def _start(self, hostname, count=1, send_furl=False): | |
396 | if send_furl: |
|
400 | if send_furl: | |
397 | d = self._scp_furl(hostname) |
|
401 | d = self._scp_furl(hostname) | |
398 | else: |
|
402 | else: | |
399 | d = defer.succeed(None) |
|
403 | d = defer.succeed(None) | |
400 | d.addCallback(lambda r: self._scp_sshx(hostname)) |
|
404 | d.addCallback(lambda r: self._scp_sshx(hostname)) | |
401 | d.addCallback(lambda r: self._ssh_engine(hostname, count)) |
|
405 | d.addCallback(lambda r: self._ssh_engine(hostname, count)) | |
402 | return d |
|
406 | return d | |
403 |
|
407 | |||
404 | def _scp_furl(self, hostname): |
|
408 | def _scp_furl(self, hostname): | |
405 | scp_cmd = "scp ~/.ipython/security/ipcontroller-engine.furl %s:.ipython/security/" % (hostname) |
|
409 | scp_cmd = "scp ~/.ipython/security/ipcontroller-engine.furl %s:.ipython/security/" % (hostname) | |
406 | cmd_list = scp_cmd.split() |
|
410 | cmd_list = scp_cmd.split() | |
407 | cmd_list[1] = os.path.expanduser(cmd_list[1]) |
|
411 | cmd_list[1] = os.path.expanduser(cmd_list[1]) | |
408 | log.msg('Copying furl file: %s' % scp_cmd) |
|
412 | log.msg('Copying furl file: %s' % scp_cmd) | |
409 | d = getProcessOutput(cmd_list[0], cmd_list[1:], env=os.environ) |
|
413 | d = getProcessOutput(cmd_list[0], cmd_list[1:], env=os.environ) | |
410 | return d |
|
414 | return d | |
411 |
|
415 | |||
412 | def _scp_sshx(self, hostname): |
|
416 | def _scp_sshx(self, hostname): | |
413 | scp_cmd = "scp %s %s:%s/%s-sshx.sh" % ( |
|
417 | scp_cmd = "scp %s %s:%s/%s-sshx.sh" % ( | |
414 | self.sshx, hostname, |
|
418 | self.sshx, hostname, | |
415 | self.temp_dir, os.environ['USER'] |
|
419 | self.temp_dir, os.environ['USER'] | |
416 | ) |
|
420 | ) | |
417 |
|
421 | |||
418 | log.msg("Copying sshx: %s" % scp_cmd) |
|
422 | log.msg("Copying sshx: %s" % scp_cmd) | |
419 | sshx_scp = scp_cmd.split() |
|
423 | sshx_scp = scp_cmd.split() | |
420 | d = getProcessOutput(sshx_scp[0], sshx_scp[1:], env=os.environ) |
|
424 | d = getProcessOutput(sshx_scp[0], sshx_scp[1:], env=os.environ) | |
421 | return d |
|
425 | return d | |
422 |
|
426 | |||
423 | def _ssh_engine(self, hostname, count): |
|
427 | def _ssh_engine(self, hostname, count): | |
424 | exec_engine = "ssh %s sh %s/%s-sshx.sh %s" % ( |
|
428 | exec_engine = "ssh %s sh %s/%s-sshx.sh %s" % ( | |
425 | hostname, self.temp_dir, |
|
429 | hostname, self.temp_dir, | |
426 | os.environ['USER'], self.engine_command |
|
430 | os.environ['USER'], self.engine_command | |
427 | ) |
|
431 | ) | |
428 | cmds = exec_engine.split() |
|
432 | cmds = exec_engine.split() | |
429 | dlist = [] |
|
433 | dlist = [] | |
430 | log.msg("about to start engines...") |
|
434 | log.msg("about to start engines...") | |
431 | for i in range(count): |
|
435 | for i in range(count): | |
432 | log.msg('Starting engines: %s' % exec_engine) |
|
436 | log.msg('Starting engines: %s' % exec_engine) | |
433 | d = getProcessOutput(cmds[0], cmds[1:], env=os.environ) |
|
437 | d = getProcessOutput(cmds[0], cmds[1:], env=os.environ) | |
434 | dlist.append(d) |
|
438 | dlist.append(d) | |
435 | return gatherBoth(dlist, consumeErrors=True) |
|
439 | return gatherBoth(dlist, consumeErrors=True) | |
436 |
|
440 | |||
437 | def kill(self): |
|
441 | def kill(self): | |
438 | dlist = [] |
|
442 | dlist = [] | |
439 | for host in self.engine_hosts.keys(): |
|
443 | for host in self.engine_hosts.keys(): | |
440 | d = self._killall(host) |
|
444 | d = self._killall(host) | |
441 | dlist.append(d) |
|
445 | dlist.append(d) | |
442 | return gatherBoth(dlist, consumeErrors=True) |
|
446 | return gatherBoth(dlist, consumeErrors=True) | |
443 |
|
447 | |||
444 | def _killall(self, hostname): |
|
448 | def _killall(self, hostname): | |
445 | d = self._scp_engine_killer(hostname) |
|
449 | d = self._scp_engine_killer(hostname) | |
446 | d.addCallback(lambda r: self._ssh_kill(hostname)) |
|
450 | d.addCallback(lambda r: self._ssh_kill(hostname)) | |
447 | # d.addErrback(self._exec_err) |
|
451 | # d.addErrback(self._exec_err) | |
448 | return d |
|
452 | return d | |
449 |
|
453 | |||
450 | def _scp_engine_killer(self, hostname): |
|
454 | def _scp_engine_killer(self, hostname): | |
451 | scp_cmd = "scp %s %s:%s/%s-engine_killer.sh" % ( |
|
455 | scp_cmd = "scp %s %s:%s/%s-engine_killer.sh" % ( | |
452 | self.engine_killer, |
|
456 | self.engine_killer, | |
453 | hostname, |
|
457 | hostname, | |
454 | self.temp_dir, |
|
458 | self.temp_dir, | |
455 | os.environ['USER'] |
|
459 | os.environ['USER'] | |
456 | ) |
|
460 | ) | |
457 | cmds = scp_cmd.split() |
|
461 | cmds = scp_cmd.split() | |
458 | log.msg('Copying engine_killer: %s' % scp_cmd) |
|
462 | log.msg('Copying engine_killer: %s' % scp_cmd) | |
459 | d = getProcessOutput(cmds[0], cmds[1:], env=os.environ) |
|
463 | d = getProcessOutput(cmds[0], cmds[1:], env=os.environ) | |
460 | return d |
|
464 | return d | |
461 |
|
465 | |||
462 | def _ssh_kill(self, hostname): |
|
466 | def _ssh_kill(self, hostname): | |
463 | kill_cmd = "ssh %s sh %s/%s-engine_killer.sh" % ( |
|
467 | kill_cmd = "ssh %s sh %s/%s-engine_killer.sh" % ( | |
464 | hostname, |
|
468 | hostname, | |
465 | self.temp_dir, |
|
469 | self.temp_dir, | |
466 | os.environ['USER'] |
|
470 | os.environ['USER'] | |
467 | ) |
|
471 | ) | |
468 | log.msg('Killing engine: %s' % kill_cmd) |
|
472 | log.msg('Killing engine: %s' % kill_cmd) | |
469 | kill_cmd = kill_cmd.split() |
|
473 | kill_cmd = kill_cmd.split() | |
470 | d = getProcessOutput(kill_cmd[0], kill_cmd[1:], env=os.environ) |
|
474 | d = getProcessOutput(kill_cmd[0], kill_cmd[1:], env=os.environ) | |
471 | return d |
|
475 | return d | |
472 |
|
476 | |||
473 | def _exec_err(self, r): |
|
477 | def _exec_err(self, r): | |
474 | log.msg(r) |
|
478 | log.msg(r) | |
475 |
|
479 | |||
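A sketch of driving the SSH engine set described above; host names and counts are illustrative, and it assumes passwordless ssh/scp to each host, a controller already listening, and a running Twisted reactor:

    from twisted.internet import reactor

    engine_hosts = {'node1.example.com': 2, 'node2.example.com': 4}
    sshset = SSHEngineSet(engine_hosts, sshx=None, ipengine='ipengine')

    # Copy the engine FURL and the sshx.sh helper to each host, then launch
    # the requested number of ipengine processes per host.
    d = sshset.start(send_furl=True)
    d.addCallback(lambda r: reactor.callLater(3600, sshset.kill))
    reactor.run()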
476 | #----------------------------------------------------------------------------- |
|
480 | #----------------------------------------------------------------------------- | |
477 | # Main functions for the different types of clusters |
|
481 | # Main functions for the different types of clusters | |
478 | #----------------------------------------------------------------------------- |
|
482 | #----------------------------------------------------------------------------- | |
479 |
|
483 | |||
480 | # TODO: |
|
484 | # TODO: | |
481 | # The logic in these functions should be moved into classes like LocalCluster, |
|
485 | # The logic in these functions should be moved into classes like LocalCluster, | |
482 | # MpirunCluster, PBSCluster, etc. This would remove a lot of the duplication. |
|
486 | # MpirunCluster, PBSCluster, etc. This would remove a lot of the duplication. | |
483 | # The main functions should then just parse the command line arguments, create |
|
487 | # The main functions should then just parse the command line arguments, create | |
484 | # the appropriate class and call a 'start' method. |
|
488 | # the appropriate class and call a 'start' method. | |
485 |
|
489 | |||
486 |
|
490 | |||
487 | def check_security(args, cont_args): |
|
491 | def check_security(args, cont_args): | |
|
492 | """Check to see if we should run with SSL support.""" | |||
488 | if (not args.x or not args.y) and not have_crypto: |
|
493 | if (not args.x or not args.y) and not have_crypto: | |
489 | log.err(""" |
|
494 | log.err(""" | |
490 | OpenSSL/pyOpenSSL is not available, so we can't run in secure mode. |
|
495 | OpenSSL/pyOpenSSL is not available, so we can't run in secure mode. | |
491 | Try running ipcluster with the -xy flags: ipcluster local -xy -n 4""") |
|
496 | Try running ipcluster with the -xy flags: ipcluster local -xy -n 4""") | |
492 | reactor.stop() |
|
497 | reactor.stop() | |
493 | return False |
|
498 | return False | |
494 | if args.x: |
|
499 | if args.x: | |
495 | cont_args.append('-x') |
|
500 | cont_args.append('-x') | |
496 | if args.y: |
|
501 | if args.y: | |
497 | cont_args.append('-y') |
|
502 | cont_args.append('-y') | |
498 | return True |
|
503 | return True | |
499 |
|
504 | |||
500 |
|
505 | |||
501 | def check_reuse(args, cont_args): |
|
506 | def check_reuse(args, cont_args): | |
|
507 | """Check to see if we should try to resuse FURL files.""" | |||
502 | if args.r: |
|
508 | if args.r: | |
503 | cont_args.append('-r') |
|
509 | cont_args.append('-r') | |
504 | if args.client_port == 0 or args.engine_port == 0: |
|
510 | if args.client_port == 0 or args.engine_port == 0: | |
505 | log.err(""" |
|
511 | log.err(""" | |
506 | To reuse FURL files, you must also set the client and engine ports using |
|
512 | To reuse FURL files, you must also set the client and engine ports using | |
507 | the --client-port and --engine-port options.""") |
|
513 | the --client-port and --engine-port options.""") | |
508 | reactor.stop() |
|
514 | reactor.stop() | |
509 | return False |
|
515 | return False | |
510 | cont_args.append('--client-port=%i' % args.client_port) |
|
516 | cont_args.append('--client-port=%i' % args.client_port) | |
511 | cont_args.append('--engine-port=%i' % args.engine_port) |
|
517 | cont_args.append('--engine-port=%i' % args.engine_port) | |
512 | return True |
|
518 | return True | |
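For example (the port numbers are illustrative), a local cluster that reuses its FURL files across restarts would be started as: ipcluster local -n 4 -r --client-port=10101 --engine-port=10102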
513 |
|
519 | |||
514 |
|
520 | |||
515 | def _err_and_stop(f): |
|
521 | def _err_and_stop(f): | |
|
522 | """Errback to log a failure and halt the reactor on a fatal error.""" | |||
516 | log.err(f) |
|
523 | log.err(f) | |
517 | reactor.stop() |
|
524 | reactor.stop() | |
518 |
|
525 | |||
519 |
|
526 | |||
520 | def _delay_start(cont_pid, start_engines, furl_file, reuse): |
|
527 | def _delay_start(cont_pid, start_engines, furl_file, reuse): | |
|
528 | """Wait for controller to create FURL files and the start the engines.""" | |||
521 | if not reuse: |
|
529 | if not reuse: | |
522 | if os.path.isfile(furl_file): |
|
530 | if os.path.isfile(furl_file): | |
523 | os.unlink(furl_file) |
|
531 | os.unlink(furl_file) | |
524 | log.msg('Waiting for controller to finish starting...') |
|
532 | log.msg('Waiting for controller to finish starting...') | |
525 | d = wait_for_file(furl_file, delay=0.2, max_tries=50) |
|
533 | d = wait_for_file(furl_file, delay=0.2, max_tries=50) | |
526 | d.addCallback(lambda _: log.msg('Controller started')) |
|
534 | d.addCallback(lambda _: log.msg('Controller started')) | |
527 | d.addCallback(lambda _: start_engines(cont_pid)) |
|
535 | d.addCallback(lambda _: start_engines(cont_pid)) | |
528 | return d |
|
536 | return d | |
529 |
|
537 | |||
530 |
|
538 | |||
531 | def main_local(args): |
|
539 | def main_local(args): | |
532 | cont_args = [] |
|
540 | cont_args = [] | |
533 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) |
|
541 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) | |
534 |
|
542 | |||
535 | # Check security settings before proceeding |
|
543 | # Check security settings before proceeding | |
536 | if not check_security(args, cont_args): |
|
544 | if not check_security(args, cont_args): | |
537 | return |
|
545 | return | |
538 |
|
546 | |||
539 | # See if we are reusing FURL files |
|
547 | # See if we are reusing FURL files | |
540 | if not check_reuse(args, cont_args): |
|
548 | if not check_reuse(args, cont_args): | |
541 | return |
|
549 | return | |
542 |
|
550 | |||
543 | cl = ControllerLauncher(extra_args=cont_args) |
|
551 | cl = ControllerLauncher(extra_args=cont_args) | |
544 | dstart = cl.start() |
|
552 | dstart = cl.start() | |
545 | def start_engines(cont_pid): |
|
553 | def start_engines(cont_pid): | |
546 | engine_args = [] |
|
554 | engine_args = [] | |
547 | engine_args.append('--logfile=%s' % \ |
|
555 | engine_args.append('--logfile=%s' % \ | |
548 | pjoin(args.logdir,'ipengine%s-' % cont_pid)) |
|
556 | pjoin(args.logdir,'ipengine%s-' % cont_pid)) | |
549 | eset = LocalEngineSet(extra_args=engine_args) |
|
557 | eset = LocalEngineSet(extra_args=engine_args) | |
550 | def shutdown(signum, frame): |
|
558 | def shutdown(signum, frame): | |
551 | log.msg('Stopping local cluster') |
|
559 | log.msg('Stopping local cluster') | |
552 | # We are still playing with the times here, but these seem |
|
560 | # We are still playing with the times here, but these seem | |
553 | # to be reliable in allowing everything to exit cleanly. |
|
561 | # to be reliable in allowing everything to exit cleanly. | |
554 | eset.interrupt_then_kill(0.5) |
|
562 | eset.interrupt_then_kill(0.5) | |
555 | cl.interrupt_then_kill(0.5) |
|
563 | cl.interrupt_then_kill(0.5) | |
556 | reactor.callLater(1.0, reactor.stop) |
|
564 | reactor.callLater(1.0, reactor.stop) | |
557 | signal.signal(signal.SIGINT,shutdown) |
|
565 | signal.signal(signal.SIGINT,shutdown) | |
558 | d = eset.start(args.n) |
|
566 | d = eset.start(args.n) | |
559 | return d |
|
567 | return d | |
560 | config = kernel_config_manager.get_config_obj() |
|
568 | config = kernel_config_manager.get_config_obj() | |
561 | furl_file = config['controller']['engine_furl_file'] |
|
569 | furl_file = config['controller']['engine_furl_file'] | |
562 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) |
|
570 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) | |
563 | dstart.addErrback(_err_and_stop) |
|
571 | dstart.addErrback(_err_and_stop) | |
564 |
|
572 | |||
565 |
|
573 | |||
566 | def main_mpi(args): |
|
574 | def main_mpi(args): | |
567 | cont_args = [] |
|
575 | cont_args = [] | |
568 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) |
|
576 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) | |
569 |
|
577 | |||
570 | # Check security settings before proceeding |
|
578 | # Check security settings before proceeding | |
571 | if not check_security(args, cont_args): |
|
579 | if not check_security(args, cont_args): | |
572 | return |
|
580 | return | |
573 |
|
581 | |||
574 | # See if we are reusing FURL files |
|
582 | # See if we are reusing FURL files | |
575 | if not check_reuse(args, cont_args): |
|
583 | if not check_reuse(args, cont_args): | |
576 | return |
|
584 | return | |
577 |
|
585 | |||
578 | cl = ControllerLauncher(extra_args=cont_args) |
|
586 | cl = ControllerLauncher(extra_args=cont_args) | |
579 | dstart = cl.start() |
|
587 | dstart = cl.start() | |
580 | def start_engines(cont_pid): |
|
588 | def start_engines(cont_pid): | |
581 | raw_args = [args.cmd] |
|
589 | raw_args = [args.cmd] | |
582 | raw_args.extend(['-n',str(args.n)]) |
|
590 | raw_args.extend(['-n',str(args.n)]) | |
583 | raw_args.append('ipengine') |
|
591 | raw_args.append('ipengine') | |
584 | raw_args.append('-l') |
|
592 | raw_args.append('-l') | |
585 | raw_args.append(pjoin(args.logdir,'ipengine%s-' % cont_pid)) |
|
593 | raw_args.append(pjoin(args.logdir,'ipengine%s-' % cont_pid)) | |
586 | if args.mpi: |
|
594 | if args.mpi: | |
587 | raw_args.append('--mpi=%s' % args.mpi) |
|
595 | raw_args.append('--mpi=%s' % args.mpi) | |
588 | eset = ProcessLauncher(raw_args) |
|
596 | eset = ProcessLauncher(raw_args) | |
589 | def shutdown(signum, frame): |
|
597 | def shutdown(signum, frame): | |
590 | log.msg('Stopping local cluster') |
|
598 | log.msg('Stopping local cluster') | |
591 | # We are still playing with the times here, but these seem |
|
599 | # We are still playing with the times here, but these seem | |
592 | # to be reliable in allowing everything to exit cleanly. |
|
600 | # to be reliable in allowing everything to exit cleanly. | |
593 | eset.interrupt_then_kill(1.0) |
|
601 | eset.interrupt_then_kill(1.0) | |
594 | cl.interrupt_then_kill(1.0) |
|
602 | cl.interrupt_then_kill(1.0) | |
595 | reactor.callLater(2.0, reactor.stop) |
|
603 | reactor.callLater(2.0, reactor.stop) | |
596 | signal.signal(signal.SIGINT,shutdown) |
|
604 | signal.signal(signal.SIGINT,shutdown) | |
597 | d = eset.start() |
|
605 | d = eset.start() | |
598 | return d |
|
606 | return d | |
599 | config = kernel_config_manager.get_config_obj() |
|
607 | config = kernel_config_manager.get_config_obj() | |
600 | furl_file = config['controller']['engine_furl_file'] |
|
608 | furl_file = config['controller']['engine_furl_file'] | |
601 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) |
|
609 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) | |
602 | dstart.addErrback(_err_and_stop) |
|
610 | dstart.addErrback(_err_and_stop) | |
603 |
|
611 | |||
604 |
|
612 | |||
605 | def main_pbs(args): |
|
613 | def main_pbs(args): | |
606 | cont_args = [] |
|
614 | cont_args = [] | |
607 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) |
|
615 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) | |
608 |
|
616 | |||
609 | # Check security settings before proceeding |
|
617 | # Check security settings before proceeding | |
610 | if not check_security(args, cont_args): |
|
618 | if not check_security(args, cont_args): | |
611 | return |
|
619 | return | |
612 |
|
620 | |||
613 | # See if we are reusing FURL files |
|
621 | # See if we are reusing FURL files | |
614 | if not check_reuse(args, cont_args): |
|
622 | if not check_reuse(args, cont_args): | |
615 | return |
|
623 | return | |
616 |
|
624 | |||
617 | cl = ControllerLauncher(extra_args=cont_args) |
|
625 | cl = ControllerLauncher(extra_args=cont_args) | |
618 | dstart = cl.start() |
|
626 | dstart = cl.start() | |
619 | def start_engines(r): |
|
627 | def start_engines(r): | |
620 | pbs_set = PBSEngineSet(args.pbsscript) |
|
628 | pbs_set = PBSEngineSet(args.pbsscript) | |
621 | def shutdown(signum, frame): |
|
629 | def shutdown(signum, frame): | |
622 | log.msg('Stopping pbs cluster') |
|
630 | log.msg('Stopping pbs cluster') | |
623 | d = pbs_set.kill() |
|
631 | d = pbs_set.kill() | |
624 | d.addBoth(lambda _: cl.interrupt_then_kill(1.0)) |
|
632 | d.addBoth(lambda _: cl.interrupt_then_kill(1.0)) | |
625 | d.addBoth(lambda _: reactor.callLater(2.0, reactor.stop)) |
|
633 | d.addBoth(lambda _: reactor.callLater(2.0, reactor.stop)) | |
626 | signal.signal(signal.SIGINT,shutdown) |
|
634 | signal.signal(signal.SIGINT,shutdown) | |
627 | d = pbs_set.start(args.n) |
|
635 | d = pbs_set.start(args.n) | |
628 | return d |
|
636 | return d | |
629 | config = kernel_config_manager.get_config_obj() |
|
637 | config = kernel_config_manager.get_config_obj() | |
630 | furl_file = config['controller']['engine_furl_file'] |
|
638 | furl_file = config['controller']['engine_furl_file'] | |
631 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) |
|
639 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) | |
632 | dstart.addErrback(_err_and_stop) |
|
640 | dstart.addErrback(_err_and_stop) | |
633 |
|
641 | |||
634 |
|
642 | |||
635 | def main_ssh(args): |
|
643 | def main_ssh(args): | |
636 | """Start a controller on localhost and engines using ssh. |
|
644 | """Start a controller on localhost and engines using ssh. | |
637 |
|
645 | |||
638 | Your clusterfile should look like:: |
|
646 | Your clusterfile should look like:: | |
639 |
|
647 | |||
640 | send_furl = False # set to True if you want the engine FURL file sent to each host |
|
648 | send_furl = False # set to True if you want the engine FURL file sent to each host | |
641 | engines = { |
|
649 | engines = { | |
642 | 'engine_host1' : engine_count, |
|
650 | 'engine_host1' : engine_count, | |
643 | 'engine_host2' : engine_count2 |
|
651 | 'engine_host2' : engine_count2 | |
644 | } |
|
652 | } | |
645 | """ |
|
653 | """ | |
646 | clusterfile = {} |
|
654 | clusterfile = {} | |
647 | execfile(args.clusterfile, clusterfile) |
|
655 | execfile(args.clusterfile, clusterfile) | |
648 | if not clusterfile.has_key('send_furl'): |
|
656 | if not clusterfile.has_key('send_furl'): | |
649 | clusterfile['send_furl'] = False |
|
657 | clusterfile['send_furl'] = False | |
650 |
|
658 | |||
651 | cont_args = [] |
|
659 | cont_args = [] | |
652 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) |
|
660 | cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller')) | |
653 |
|
661 | |||
654 | # Check security settings before proceeding |
|
662 | # Check security settings before proceeding | |
655 | if not check_security(args, cont_args): |
|
663 | if not check_security(args, cont_args): | |
656 | return |
|
664 | return | |
657 |
|
665 | |||
658 | # See if we are reusing FURL files |
|
666 | # See if we are reusing FURL files | |
659 | if not check_reuse(args, cont_args): |
|
667 | if not check_reuse(args, cont_args): | |
660 | return |
|
668 | return | |
661 |
|
669 | |||
662 | cl = ControllerLauncher(extra_args=cont_args) |
|
670 | cl = ControllerLauncher(extra_args=cont_args) | |
663 | dstart = cl.start() |
|
671 | dstart = cl.start() | |
664 | def start_engines(cont_pid): |
|
672 | def start_engines(cont_pid): | |
665 | ssh_set = SSHEngineSet(clusterfile['engines'], sshx=args.sshx) |
|
673 | ssh_set = SSHEngineSet(clusterfile['engines'], sshx=args.sshx) | |
666 | def shutdown(signum, frame): |
|
674 | def shutdown(signum, frame): | |
667 | d = ssh_set.kill() |
|
675 | d = ssh_set.kill() | |
668 | cl.interrupt_then_kill(1.0) |
|
676 | cl.interrupt_then_kill(1.0) | |
669 | reactor.callLater(2.0, reactor.stop) |
|
677 | reactor.callLater(2.0, reactor.stop) | |
670 | signal.signal(signal.SIGINT,shutdown) |
|
678 | signal.signal(signal.SIGINT,shutdown) | |
671 | d = ssh_set.start(clusterfile['send_furl']) |
|
679 | d = ssh_set.start(clusterfile['send_furl']) | |
672 | return d |
|
680 | return d | |
673 | config = kernel_config_manager.get_config_obj() |
|
681 | config = kernel_config_manager.get_config_obj() | |
674 | furl_file = config['controller']['engine_furl_file'] |
|
682 | furl_file = config['controller']['engine_furl_file'] | |
675 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) |
|
683 | dstart.addCallback(_delay_start, start_engines, furl_file, args.r) | |
676 | dstart.addErrback(_err_and_stop) |
|
684 | dstart.addErrback(_err_and_stop) | |
677 |
|
685 | |||
678 |
|
686 | |||
679 | def get_args(): |
|
687 | def get_args(): | |
680 | base_parser = argparse.ArgumentParser(add_help=False) |
|
688 | base_parser = argparse.ArgumentParser(add_help=False) | |
681 | base_parser.add_argument( |
|
689 | base_parser.add_argument( | |
682 | '-r', |
|
690 | '-r', | |
683 | action='store_true', |
|
691 | action='store_true', | |
684 | dest='r', |
|
692 | dest='r', | |
685 | help='try to reuse FURL files. Use with --client-port and --engine-port' |
|
693 | help='try to reuse FURL files. Use with --client-port and --engine-port' | |
686 | ) |
|
694 | ) | |
687 | base_parser.add_argument( |
|
695 | base_parser.add_argument( | |
688 | '--client-port', |
|
696 | '--client-port', | |
689 | type=int, |
|
697 | type=int, | |
690 | dest='client_port', |
|
698 | dest='client_port', | |
691 | help='the port the controller will listen on for client connections', |
|
699 | help='the port the controller will listen on for client connections', | |
692 | default=0 |
|
700 | default=0 | |
693 | ) |
|
701 | ) | |
694 | base_parser.add_argument( |
|
702 | base_parser.add_argument( | |
695 | '--engine-port', |
|
703 | '--engine-port', | |
696 | type=int, |
|
704 | type=int, | |
697 | dest='engine_port', |
|
705 | dest='engine_port', | |
698 | help='the port the controller will listen on for engine connections', |
|
706 | help='the port the controller will listen on for engine connections', | |
699 | default=0 |
|
707 | default=0 | |
700 | ) |
|
708 | ) | |
701 | base_parser.add_argument( |
|
709 | base_parser.add_argument( | |
702 | '-x', |
|
710 | '-x', | |
703 | action='store_true', |
|
711 | action='store_true', | |
704 | dest='x', |
|
712 | dest='x', | |
705 | help='turn off client security' |
|
713 | help='turn off client security' | |
706 | ) |
|
714 | ) | |
707 | base_parser.add_argument( |
|
715 | base_parser.add_argument( | |
708 | '-y', |
|
716 | '-y', | |
709 | action='store_true', |
|
717 | action='store_true', | |
710 | dest='y', |
|
718 | dest='y', | |
711 | help='turn off engine security' |
|
719 | help='turn off engine security' | |
712 | ) |
|
720 | ) | |
713 | base_parser.add_argument( |
|
721 | base_parser.add_argument( | |
714 | "--logdir", |
|
722 | "--logdir", | |
715 | type=str, |
|
723 | type=str, | |
716 | dest="logdir", |
|
724 | dest="logdir", | |
717 | help="directory to put log files (default=$IPYTHONDIR/log)", |
|
725 | help="directory to put log files (default=$IPYTHONDIR/log)", | |
718 | default=pjoin(get_ipython_dir(),'log') |
|
726 | default=pjoin(get_ipython_dir(),'log') | |
719 | ) |
|
727 | ) | |
720 | base_parser.add_argument( |
|
728 | base_parser.add_argument( | |
721 | "-n", |
|
729 | "-n", | |
722 | "--num", |
|
730 | "--num", | |
723 | type=int, |
|
731 | type=int, | |
724 | dest="n", |
|
732 | dest="n", | |
725 | default=2, |
|
733 | default=2, | |
726 | help="the number of engines to start" |
|
734 | help="the number of engines to start" | |
727 | ) |
|
735 | ) | |
728 |
|
736 | |||
729 | parser = argparse.ArgumentParser( |
|
737 | parser = argparse.ArgumentParser( | |
730 | description='IPython cluster startup. This starts a controller and\ |
|
738 | description='IPython cluster startup. This starts a controller and\ | |
731 | engines using various approaches. |
|
739 | engines using various approaches. Use the IPYTHONDIR environment\ | |
732 | THE API WILL CHANGE SIGNIFICANTLY BEFORE THE FINAL RELEASE.' |
|
740 | variable to change your IPython directory from the default of\ | |
|
741 | .ipython or _ipython. The log and security subdirectories of your\ | |||
|
742 | IPython directory will be used by this script for log files and\ | |||
|
743 | security files.' | |||
733 | ) |
|
744 | ) | |
734 | subparsers = parser.add_subparsers( |
|
745 | subparsers = parser.add_subparsers( | |
735 | help='available cluster types. For help, do "ipcluster TYPE --help"') |
|
746 | help='available cluster types. For help, do "ipcluster TYPE --help"') | |
736 |
|
747 | |||
737 | parser_local = subparsers.add_parser( |
|
748 | parser_local = subparsers.add_parser( | |
738 | 'local', |
|
749 | 'local', | |
739 | help='run a local cluster', |
|
750 | help='run a local cluster', | |
740 | parents=[base_parser] |
|
751 | parents=[base_parser] | |
741 | ) |
|
752 | ) | |
742 | parser_local.set_defaults(func=main_local) |
|
753 | parser_local.set_defaults(func=main_local) | |
743 |
|
754 | |||
744 | parser_mpirun = subparsers.add_parser( |
|
755 | parser_mpirun = subparsers.add_parser( | |
745 | 'mpirun', |
|
756 | 'mpirun', | |
746 | help='run a cluster using mpirun (mpiexec also works)', |
|
757 | help='run a cluster using mpirun (mpiexec also works)', | |
747 | parents=[base_parser] |
|
758 | parents=[base_parser] | |
748 | ) |
|
759 | ) | |
749 | parser_mpirun.add_argument( |
|
760 | parser_mpirun.add_argument( | |
750 | "--mpi", |
|
761 | "--mpi", | |
751 | type=str, |
|
762 | type=str, | |
752 | dest="mpi", # Don't put a default here to allow no MPI support |
|
763 | dest="mpi", # Don't put a default here to allow no MPI support | |
753 | help="how to call MPI_Init (default=mpi4py)" |
|
764 | help="how to call MPI_Init (default=mpi4py)" | |
754 | ) |
|
765 | ) | |
755 | parser_mpirun.set_defaults(func=main_mpi, cmd='mpirun') |
|
766 | parser_mpirun.set_defaults(func=main_mpi, cmd='mpirun') | |
756 |
|
767 | |||
757 | parser_mpiexec = subparsers.add_parser( |
|
768 | parser_mpiexec = subparsers.add_parser( | |
758 | 'mpiexec', |
|
769 | 'mpiexec', | |
759 | help='run a cluster using mpiexec (mpirun also works)', |
|
770 | help='run a cluster using mpiexec (mpirun also works)', | |
760 | parents=[base_parser] |
|
771 | parents=[base_parser] | |
761 | ) |
|
772 | ) | |
762 | parser_mpiexec.add_argument( |
|
773 | parser_mpiexec.add_argument( | |
763 | "--mpi", |
|
774 | "--mpi", | |
764 | type=str, |
|
775 | type=str, | |
765 | dest="mpi", # Don't put a default here to allow no MPI support |
|
776 | dest="mpi", # Don't put a default here to allow no MPI support | |
766 | help="how to call MPI_Init (default=mpi4py)" |
|
777 | help="how to call MPI_Init (default=mpi4py)" | |
767 | ) |
|
778 | ) | |
768 | parser_mpiexec.set_defaults(func=main_mpi, cmd='mpiexec') |
|
779 | parser_mpiexec.set_defaults(func=main_mpi, cmd='mpiexec') | |
769 |
|
780 | |||
770 | parser_pbs = subparsers.add_parser( |
|
781 | parser_pbs = subparsers.add_parser( | |
771 | 'pbs', |
|
782 | 'pbs', | |
772 | help='run a pbs cluster', |
|
783 | help='run a pbs cluster', | |
773 | parents=[base_parser] |
|
784 | parents=[base_parser] | |
774 | ) |
|
785 | ) | |
775 | parser_pbs.add_argument( |
|
786 | parser_pbs.add_argument( | |
776 | '--pbs-script', |
|
787 | '--pbs-script', | |
777 | type=str, |
|
788 | type=str, | |
778 | dest='pbsscript', |
|
789 | dest='pbsscript', | |
779 | help='PBS script template', |
|
790 | help='PBS script template', | |
780 | default='pbs.template' |
|
791 | default='pbs.template' | |
781 | ) |
|
792 | ) | |
782 | parser_pbs.set_defaults(func=main_pbs) |
|
793 | parser_pbs.set_defaults(func=main_pbs) | |
783 |
|
794 | |||
784 | parser_ssh = subparsers.add_parser( |
|
795 | parser_ssh = subparsers.add_parser( | |
785 | 'ssh', |
|
796 | 'ssh', | |
786 | help='run a cluster using ssh; ssh keys should already be set up', |
|
797 | help='run a cluster using ssh; ssh keys should already be set up', | |
787 | parents=[base_parser] |
|
798 | parents=[base_parser] | |
788 | ) |
|
799 | ) | |
789 | parser_ssh.add_argument( |
|
800 | parser_ssh.add_argument( | |
790 | '--clusterfile', |
|
801 | '--clusterfile', | |
791 | type=str, |
|
802 | type=str, | |
792 | dest='clusterfile', |
|
803 | dest='clusterfile', | |
793 | help='python file describing the cluster', |
|
804 | help='python file describing the cluster', | |
794 | default='clusterfile.py', |
|
805 | default='clusterfile.py', | |
795 | ) |
|
806 | ) | |
796 | parser_ssh.add_argument( |
|
807 | parser_ssh.add_argument( | |
797 | '--sshx', |
|
808 | '--sshx', | |
798 | type=str, |
|
809 | type=str, | |
799 | dest='sshx', |
|
810 | dest='sshx', | |
800 | help='sshx launcher helper' |
|
811 | help='sshx launcher helper' | |
801 | ) |
|
812 | ) | |
802 | parser_ssh.set_defaults(func=main_ssh) |
|
813 | parser_ssh.set_defaults(func=main_ssh) | |
803 |
|
814 | |||
804 | args = parser.parse_args() |
|
815 | args = parser.parse_args() | |
805 | return args |
|
816 | return args | |
806 |
|
817 | |||
807 | def main(): |
|
818 | def main(): | |
808 | args = get_args() |
|
819 | args = get_args() | |
809 | reactor.callWhenRunning(args.func, args) |
|
820 | reactor.callWhenRunning(args.func, args) | |
810 | log.startLogging(sys.stdout) |
|
821 | log.startLogging(sys.stdout) | |
811 | reactor.run() |
|
822 | reactor.run() | |
812 |
|
823 | |||
813 | if __name__ == '__main__': |
|
824 | if __name__ == '__main__': | |
814 | main() |
|
825 | main() |
@@ -1,408 +1,416 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | """The IPython controller.""" |
|
4 | """The IPython controller.""" | |
5 |
|
5 | |||
6 | __docformat__ = "restructuredtext en" |
|
6 | __docformat__ = "restructuredtext en" | |
7 |
|
7 | |||
8 | #------------------------------------------------------------------------------- |
|
8 | #------------------------------------------------------------------------------- | |
9 | # Copyright (C) 2008 The IPython Development Team |
|
9 | # Copyright (C) 2008 The IPython Development Team | |
10 | # |
|
10 | # | |
11 | # Distributed under the terms of the BSD License. The full license is in |
|
11 | # Distributed under the terms of the BSD License. The full license is in | |
12 | # the file COPYING, distributed as part of this software. |
|
12 | # the file COPYING, distributed as part of this software. | |
13 | #------------------------------------------------------------------------------- |
|
13 | #------------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 | # Imports |
|
16 | # Imports | |
17 | #------------------------------------------------------------------------------- |
|
17 | #------------------------------------------------------------------------------- | |
18 |
|
18 | |||
19 | # Python looks for an empty string at the beginning of sys.path to enable |
|
19 | # Python looks for an empty string at the beginning of sys.path to enable | |
20 | # importing from the cwd. |
|
20 | # importing from the cwd. | |
21 | import sys |
|
21 | import sys | |
22 | sys.path.insert(0, '') |
|
22 | sys.path.insert(0, '') | |
23 |
|
23 | |||
24 | import sys, time, os |
|
|||
25 | import tempfile |
|
|||
26 | from optparse import OptionParser |
|
24 | from optparse import OptionParser | |
|
25 | import os | |||
|
26 | import time | |||
|
27 | import tempfile | |||
27 |
|
28 | |||
28 | from twisted.application import internet, service |
|
29 | from twisted.application import internet, service | |
29 | from twisted.internet import reactor, error, defer |
|
30 | from twisted.internet import reactor, error, defer | |
30 | from twisted.python import log |
|
31 | from twisted.python import log | |
31 |
|
32 | |||
32 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub, have_crypto |
|
33 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub, have_crypto | |
33 |
|
34 | |||
34 | # from IPython.tools import growl |
|
35 | # from IPython.tools import growl | |
35 | # growl.start("IPython1 Controller") |
|
36 | # growl.start("IPython1 Controller") | |
36 |
|
37 | |||
37 | from IPython.kernel.error import SecurityError |
|
38 | from IPython.kernel.error import SecurityError | |
38 | from IPython.kernel import controllerservice |
|
39 | from IPython.kernel import controllerservice | |
39 | from IPython.kernel.fcutil import check_furl_file_security |
|
40 | from IPython.kernel.fcutil import check_furl_file_security | |
40 |
|
41 | |||
41 | # Create various ipython directories if they don't exist. |
|
42 | # Create various ipython directories if they don't exist. | |
42 | # This must be done before IPython.kernel.config is imported. |
|
43 | # This must be done before IPython.kernel.config is imported. | |
43 | from IPython.iplib import user_setup |
|
44 | from IPython.iplib import user_setup | |
44 | from IPython.genutils import get_ipython_dir, get_log_dir, get_security_dir |
|
45 | from IPython.genutils import get_ipython_dir, get_log_dir, get_security_dir | |
45 | if os.name == 'posix': |
|
46 | if os.name == 'posix': | |
46 | rc_suffix = '' |
|
47 | rc_suffix = '' | |
47 | else: |
|
48 | else: | |
48 | rc_suffix = '.ini' |
|
49 | rc_suffix = '.ini' | |
49 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) |
|
50 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) | |
50 | get_log_dir() |
|
51 | get_log_dir() | |
51 | get_security_dir() |
|
52 | get_security_dir() | |
52 |
|
53 | |||
53 | from IPython.kernel.config import config_manager as kernel_config_manager |
|
54 | from IPython.kernel.config import config_manager as kernel_config_manager | |
54 | from IPython.config.cutils import import_item |
|
55 | from IPython.config.cutils import import_item | |
55 |
|
56 | |||
56 |
|
57 | |||
57 | #------------------------------------------------------------------------------- |
|
58 | #------------------------------------------------------------------------------- | |
58 | # Code |
|
59 | # Code | |
59 | #------------------------------------------------------------------------------- |
|
60 | #------------------------------------------------------------------------------- | |
60 |
|
61 | |||
61 | def get_temp_furlfile(filename): |
|
62 | def get_temp_furlfile(filename): | |
62 | return tempfile.mktemp(dir=os.path.dirname(filename), |
|
63 | return tempfile.mktemp(dir=os.path.dirname(filename), | |
63 | prefix=os.path.basename(filename)) |
|
64 | prefix=os.path.basename(filename)) | |
64 |
|
65 | |||
65 | def make_tub(ip, port, secure, cert_file): |
|
66 | def make_tub(ip, port, secure, cert_file): | |
66 | """ |
|
67 | """ | |
67 | Create a listening tub given an ip, port, and cert_file location. |
|
68 | Create a listening tub given an ip, port, and cert_file location. | |
68 |
|
69 | |||
69 | :Parameters: |
|
70 | :Parameters: | |
70 | ip : str |
|
71 | ip : str | |
71 | The ip address that the tub should listen on. Empty means all interfaces |
|
72 | The ip address that the tub should listen on. Empty means all interfaces | |
72 | port : int |
|
73 | port : int | |
73 | The port that the tub should listen on. A value of 0 means |
|
74 | The port that the tub should listen on. A value of 0 means | |
74 | pick a random port |
|
75 | pick a random port | |
75 | secure: boolean |
|
76 | secure: boolean | |
76 | Will the connection be secure (in the foolscap sense) |
|
77 | Will the connection be secure (in the foolscap sense) | |
77 | cert_file: |
|
78 | cert_file: | |
78 | A filename of a file to be used for the SSL certificate |
|
79 | A filename of a file to be used for the SSL certificate | |
79 | """ |
|
80 | """ | |
80 | if secure: |
|
81 | if secure: | |
81 | if have_crypto: |
|
82 | if have_crypto: | |
82 | tub = Tub(certFile=cert_file) |
|
83 | tub = Tub(certFile=cert_file) | |
83 | else: |
|
84 | else: | |
84 | raise SecurityError(""" |
|
85 | raise SecurityError(""" | |
85 | OpenSSL/pyOpenSSL is not available, so we can't run in secure mode. |
|
86 | OpenSSL/pyOpenSSL is not available, so we can't run in secure mode. | |
86 | Try running without security using 'ipcontroller -xy'. |
|
87 | Try running without security using 'ipcontroller -xy'. | |
87 | """) |
|
88 | """) | |
88 | else: |
|
89 | else: | |
89 | tub = UnauthenticatedTub() |
|
90 | tub = UnauthenticatedTub() | |
90 |
|
91 | |||
91 | # Set the strport based on the ip and port and start listening |
|
92 | # Set the strport based on the ip and port and start listening | |
92 | if ip == '': |
|
93 | if ip == '': | |
93 | strport = "tcp:%i" % port |
|
94 | strport = "tcp:%i" % port | |
94 | else: |
|
95 | else: | |
95 | strport = "tcp:%i:interface=%s" % (port, ip) |
|
96 | strport = "tcp:%i:interface=%s" % (port, ip) | |
96 | listener = tub.listenOn(strport) |
|
97 | listener = tub.listenOn(strport) | |
97 |
|
98 | |||
98 | return tub, listener |
|
99 | return tub, listener | |
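A minimal usage sketch of make_tub (the certificate filename is illustrative, and secure mode assumes pyOpenSSL is installed); the listener's actual port is what later gets combined with the location when the tub's location is set:

tub, listener = make_tub('127.0.0.1', 0, True, 'ipcontroller-client.pem')
print 'listening on port', listener.getPortnum()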
99 |
|
100 | |||
100 | def make_client_service(controller_service, config): |
|
101 | def make_client_service(controller_service, config): | |
101 | """ |
|
102 | """ | |
102 | Create a service that will listen for clients. |
|
103 | Create a service that will listen for clients. | |
103 |
|
104 | |||
104 | This service is simply a `foolscap.Tub` instance that has a set of Referenceables |
|
105 | This service is simply a `foolscap.Tub` instance that has a set of Referenceables | |
105 | registered with it. |
|
106 | registered with it. | |
106 | """ |
|
107 | """ | |
107 |
|
108 | |||
108 | # Now create the foolscap tub |
|
109 | # Now create the foolscap tub | |
109 | ip = config['controller']['client_tub']['ip'] |
|
110 | ip = config['controller']['client_tub']['ip'] | |
110 | port = config['controller']['client_tub'].as_int('port') |
|
111 | port = config['controller']['client_tub'].as_int('port') | |
111 | location = config['controller']['client_tub']['location'] |
|
112 | location = config['controller']['client_tub']['location'] | |
112 | secure = config['controller']['client_tub']['secure'] |
|
113 | secure = config['controller']['client_tub']['secure'] | |
113 | cert_file = config['controller']['client_tub']['cert_file'] |
|
114 | cert_file = config['controller']['client_tub']['cert_file'] | |
114 | client_tub, client_listener = make_tub(ip, port, secure, cert_file) |
|
115 | client_tub, client_listener = make_tub(ip, port, secure, cert_file) | |
115 |
|
116 | |||
116 | # Set the location in the trivial case of localhost |
|
117 | # Set the location in the trivial case of localhost | |
117 | if ip == 'localhost' or ip == '127.0.0.1': |
|
118 | if ip == 'localhost' or ip == '127.0.0.1': | |
118 | location = "127.0.0.1" |
|
119 | location = "127.0.0.1" | |
119 |
|
120 | |||
120 | if not secure: |
|
121 | if not secure: | |
121 | log.msg("WARNING: you are running the controller with no client security") |
|
122 | log.msg("WARNING: you are running the controller with no client security") | |
122 |
|
123 | |||
123 | def set_location_and_register(): |
|
124 | def set_location_and_register(): | |
124 | """Set the location for the tub and return a deferred.""" |
|
125 | """Set the location for the tub and return a deferred.""" | |
125 |
|
126 | |||
126 | def register(empty, ref, furl_file): |
|
127 | def register(empty, ref, furl_file): | |
127 | # We create and then move to make sure that when the file |
|
128 | # We create and then move to make sure that when the file | |
128 | # appears to other processes, the buffer has been flushed |
|
129 | # appears to other processes, the buffer has been flushed | |
129 | # and the file has been closed |
|
130 | # and the file has been closed | |
130 | temp_furl_file = get_temp_furlfile(furl_file) |
|
131 | temp_furl_file = get_temp_furlfile(furl_file) | |
131 | client_tub.registerReference(ref, furlFile=temp_furl_file) |
|
132 | client_tub.registerReference(ref, furlFile=temp_furl_file) | |
132 | os.rename(temp_furl_file, furl_file) |
|
133 | os.rename(temp_furl_file, furl_file) | |
133 |
|
134 | |||
134 | if location == '': |
|
135 | if location == '': | |
135 | d = client_tub.setLocationAutomatically() |
|
136 | d = client_tub.setLocationAutomatically() | |
136 | else: |
|
137 | else: | |
137 | d = defer.maybeDeferred(client_tub.setLocation, "%s:%i" % (location, client_listener.getPortnum())) |
|
138 | d = defer.maybeDeferred(client_tub.setLocation, "%s:%i" % (location, client_listener.getPortnum())) | |
138 |
|
139 | |||
139 | for ciname, ci in config['controller']['controller_interfaces'].iteritems(): |
|
140 | for ciname, ci in config['controller']['controller_interfaces'].iteritems(): | |
140 | log.msg("Adapting Controller to interface: %s" % ciname) |
|
141 | log.msg("Adapting Controller to interface: %s" % ciname) | |
141 | furl_file = ci['furl_file'] |
|
142 | furl_file = ci['furl_file'] | |
142 | log.msg("Saving furl for interface [%s] to file: %s" % (ciname, furl_file)) |
|
143 | log.msg("Saving furl for interface [%s] to file: %s" % (ciname, furl_file)) | |
143 | check_furl_file_security(furl_file, secure) |
|
144 | check_furl_file_security(furl_file, secure) | |
144 | adapted_controller = import_item(ci['controller_interface'])(controller_service) |
|
145 | adapted_controller = import_item(ci['controller_interface'])(controller_service) | |
145 | d.addCallback(register, import_item(ci['fc_interface'])(adapted_controller), |
|
146 | d.addCallback(register, import_item(ci['fc_interface'])(adapted_controller), | |
146 | furl_file=ci['furl_file']) |
|
147 | furl_file=ci['furl_file']) | |
147 |
|
148 | |||
148 | reactor.callWhenRunning(set_location_and_register) |
|
149 | reactor.callWhenRunning(set_location_and_register) | |
149 | return client_tub |
|
150 | return client_tub | |
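The register() helper above relies on a write-to-temporary-then-rename pattern so that other processes never observe a partially written FURL file. A generic sketch of the same idea, outside the foolscap context (the function and file names are illustrative):

import os, tempfile

def write_file_atomically(path, data):
    # Write into a temp file in the same directory, then rename it into
    # place; on POSIX the rename is atomic, so readers see either the old
    # file or the complete new one, never a partial write.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path),
                                    prefix=os.path.basename(path))
    try:
        os.write(fd, data)
    finally:
        os.close(fd)
    os.rename(tmp_path, path)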
150 |
|
151 | |||
151 |
|
152 | |||
152 | def make_engine_service(controller_service, config): |
|
153 | def make_engine_service(controller_service, config): | |
153 | """ |
|
154 | """ | |
154 | Create a service that will listen for engines. |
|
155 | Create a service that will listen for engines. | |
155 |
|
156 | |||
156 | This service is simply a `foolscap.Tub` instance that has a set of Referenceables |
|
157 | This service is simply a `foolscap.Tub` instance that has a set of Referenceables | |
157 | registered with it. |
|
158 | registered with it. | |
158 | """ |
|
159 | """ | |
159 |
|
160 | |||
160 | # Now create the foolscap tub |
|
161 | # Now create the foolscap tub | |
161 | ip = config['controller']['engine_tub']['ip'] |
|
162 | ip = config['controller']['engine_tub']['ip'] | |
162 | port = config['controller']['engine_tub'].as_int('port') |
|
163 | port = config['controller']['engine_tub'].as_int('port') | |
163 | location = config['controller']['engine_tub']['location'] |
|
164 | location = config['controller']['engine_tub']['location'] | |
164 | secure = config['controller']['engine_tub']['secure'] |
|
165 | secure = config['controller']['engine_tub']['secure'] | |
165 | cert_file = config['controller']['engine_tub']['cert_file'] |
|
166 | cert_file = config['controller']['engine_tub']['cert_file'] | |
166 | engine_tub, engine_listener = make_tub(ip, port, secure, cert_file) |
|
167 | engine_tub, engine_listener = make_tub(ip, port, secure, cert_file) | |
167 |
|
168 | |||
168 | # Set the location in the trivial case of localhost |
|
169 | # Set the location in the trivial case of localhost | |
169 | if ip == 'localhost' or ip == '127.0.0.1': |
|
170 | if ip == 'localhost' or ip == '127.0.0.1': | |
170 | location = "127.0.0.1" |
|
171 | location = "127.0.0.1" | |
171 |
|
172 | |||
172 | if not secure: |
|
173 | if not secure: | |
173 | log.msg("WARNING: you are running the controller with no engine security") |
|
174 | log.msg("WARNING: you are running the controller with no engine security") | |
174 |
|
175 | |||
175 | def set_location_and_register(): |
|
176 | def set_location_and_register(): | |
176 | """Set the location for the tub and return a deferred.""" |
|
177 | """Set the location for the tub and return a deferred.""" | |
177 |
|
178 | |||
178 | def register(empty, ref, furl_file): |
|
179 | def register(empty, ref, furl_file): | |
179 | # We create and then move to make sure that when the file |
|
180 | # We create and then move to make sure that when the file | |
180 | # appears to other processes, the buffer has been flushed |
|
181 | # appears to other processes, the buffer has been flushed | |
181 | # and the file has been closed |
|
182 | # and the file has been closed | |
182 | temp_furl_file = get_temp_furlfile(furl_file) |
|
183 | temp_furl_file = get_temp_furlfile(furl_file) | |
183 | engine_tub.registerReference(ref, furlFile=temp_furl_file) |
|
184 | engine_tub.registerReference(ref, furlFile=temp_furl_file) | |
184 | os.rename(temp_furl_file, furl_file) |
|
185 | os.rename(temp_furl_file, furl_file) | |
185 |
|
186 | |||
186 | if location == '': |
|
187 | if location == '': | |
187 | d = engine_tub.setLocationAutomatically() |
|
188 | d = engine_tub.setLocationAutomatically() | |
188 | else: |
|
189 | else: | |
189 | d = defer.maybeDeferred(engine_tub.setLocation, "%s:%i" % (location, engine_listener.getPortnum())) |
|
190 | d = defer.maybeDeferred(engine_tub.setLocation, "%s:%i" % (location, engine_listener.getPortnum())) | |
190 |
|
191 | |||
191 | furl_file = config['controller']['engine_furl_file'] |
|
192 | furl_file = config['controller']['engine_furl_file'] | |
192 | engine_fc_interface = import_item(config['controller']['engine_fc_interface']) |
|
193 | engine_fc_interface = import_item(config['controller']['engine_fc_interface']) | |
193 | log.msg("Saving furl for the engine to file: %s" % furl_file) |
|
194 | log.msg("Saving furl for the engine to file: %s" % furl_file) | |
194 | check_furl_file_security(furl_file, secure) |
|
195 | check_furl_file_security(furl_file, secure) | |
195 | fc_controller = engine_fc_interface(controller_service) |
|
196 | fc_controller = engine_fc_interface(controller_service) | |
196 | d.addCallback(register, fc_controller, furl_file=furl_file) |
|
197 | d.addCallback(register, fc_controller, furl_file=furl_file) | |
197 |
|
198 | |||
198 | reactor.callWhenRunning(set_location_and_register) |
|
199 | reactor.callWhenRunning(set_location_and_register) | |
199 | return engine_tub |
|
200 | return engine_tub | |
200 |
|
201 | |||
201 | def start_controller(): |
|
202 | def start_controller(): | |
202 | """ |
|
203 | """ | |
203 | Start the controller by creating the service hierarchy and starting the reactor. |
|
204 | Start the controller by creating the service hierarchy and starting the reactor. | |
204 |
|
205 | |||
205 | This method does the following: |
|
206 | This method does the following: | |
206 |
|
207 | |||
207 | * It starts the controller logging |
|
208 | * It starts the controller logging | |
208 | * It executes an import statement for the controller |
|
209 | * It executes an import statement for the controller | |
209 | * It creates 2 `foolscap.Tub` instances for the client and the engines |
|
210 | * It creates 2 `foolscap.Tub` instances for the client and the engines | |
210 | and registers `foolscap.Referenceables` with the tubs to expose the |
|
211 | and registers `foolscap.Referenceables` with the tubs to expose the | |
211 | controller to engines and clients. |
|
212 | controller to engines and clients. | |
212 | """ |
|
213 | """ | |
213 | config = kernel_config_manager.get_config_obj() |
|
214 | config = kernel_config_manager.get_config_obj() | |
214 |
|
215 | |||
215 | # Start logging |
|
216 | # Start logging | |
216 | logfile = config['controller']['logfile'] |
|
217 | logfile = config['controller']['logfile'] | |
217 | if logfile: |
|
218 | if logfile: | |
218 | logfile = logfile + str(os.getpid()) + '.log' |
|
219 | logfile = logfile + str(os.getpid()) + '.log' | |
219 | try: |
|
220 | try: | |
220 | openLogFile = open(logfile, 'w') |
|
221 | openLogFile = open(logfile, 'w') | |
221 | except: |
|
222 | except: | |
222 | openLogFile = sys.stdout |
|
223 | openLogFile = sys.stdout | |
223 | else: |
|
224 | else: | |
224 | openLogFile = sys.stdout |
|
225 | openLogFile = sys.stdout | |
225 | log.startLogging(openLogFile) |
|
226 | log.startLogging(openLogFile) | |
226 |
|
227 | |||
227 | # Execute any user defined import statements |
|
228 | # Execute any user defined import statements | |
228 | cis = config['controller']['import_statement'] |
|
229 | cis = config['controller']['import_statement'] | |
229 | if cis: |
|
230 | if cis: | |
230 | try: |
|
231 | try: | |
231 | exec cis in globals(), locals() |
|
232 | exec cis in globals(), locals() | |
232 | except: |
|
233 | except: | |
233 | log.msg("Error running import_statement: %s" % cis) |
|
234 | log.msg("Error running import_statement: %s" % cis) | |
234 |
|
235 | |||
235 | # Delete old furl files unless the reuse_furls is set |
|
236 | # Delete old furl files unless the reuse_furls is set | |
236 | reuse = config['controller']['reuse_furls'] |
|
237 | reuse = config['controller']['reuse_furls'] | |
237 | if not reuse: |
|
238 | if not reuse: | |
238 | paths = (config['controller']['engine_furl_file'], |
|
239 | paths = (config['controller']['engine_furl_file'], | |
239 | config['controller']['controller_interfaces']['task']['furl_file'], |
|
240 | config['controller']['controller_interfaces']['task']['furl_file'], | |
240 | config['controller']['controller_interfaces']['multiengine']['furl_file'] |
|
241 | config['controller']['controller_interfaces']['multiengine']['furl_file'] | |
241 | ) |
|
242 | ) | |
242 | for p in paths: |
|
243 | for p in paths: | |
243 | if os.path.isfile(p): |
|
244 | if os.path.isfile(p): | |
244 | os.remove(p) |
|
245 | os.remove(p) | |
245 |
|
246 | |||
246 | # Create the service hierarchy |
|
247 | # Create the service hierarchy | |
247 | main_service = service.MultiService() |
|
248 | main_service = service.MultiService() | |
248 | # The controller service |
|
249 | # The controller service | |
249 | controller_service = controllerservice.ControllerService() |
|
250 | controller_service = controllerservice.ControllerService() | |
250 | controller_service.setServiceParent(main_service) |
|
251 | controller_service.setServiceParent(main_service) | |
251 | # The client tub and all its referenceables |
|
252 | # The client tub and all its referenceables | |
252 | client_service = make_client_service(controller_service, config) |
|
253 | client_service = make_client_service(controller_service, config) | |
253 | client_service.setServiceParent(main_service) |
|
254 | client_service.setServiceParent(main_service) | |
254 | # The engine tub |
|
255 | # The engine tub | |
255 | engine_service = make_engine_service(controller_service, config) |
|
256 | engine_service = make_engine_service(controller_service, config) | |
256 | engine_service.setServiceParent(main_service) |
|
257 | engine_service.setServiceParent(main_service) | |
257 | # Start the controller service and set things running |
|
258 | # Start the controller service and set things running | |
258 | main_service.startService() |
|
259 | main_service.startService() | |
259 | reactor.run() |
|
260 | reactor.run() | |
260 |
|
261 | |||
261 | def init_config(): |
|
262 | def init_config(): | |
262 | """ |
|
263 | """ | |
263 | Initialize the configuration using default and command line options. |
|
264 | Initialize the configuration using default and command line options. | |
264 | """ |
|
265 | """ | |
265 |
|
266 | |||
266 | parser = OptionParser( |
|
267 | parser = OptionParser("""ipcontroller [options] | |
|
268 | ||||
|
269 | Start an IPython controller. | |||
|
270 | ||||
|
271 | Use the IPYTHONDIR environment variable to change your IPython directory | |||
|
272 | from the default of .ipython or _ipython. The log and security | |||
|
273 | subdirectories of your IPython directory will be used by this script | |||
|
274 | for log files and security files.""") | |||
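For example (the path is illustrative), a controller that keeps its log and security files under a non-default IPython directory would be started as: IPYTHONDIR=/scratch/ipythondir ipcontroller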
267 |
|
275 | |||
268 | # Client related options |
|
276 | # Client related options | |
269 | parser.add_option( |
|
277 | parser.add_option( | |
270 | "--client-ip", |
|
278 | "--client-ip", | |
271 | type="string", |
|
279 | type="string", | |
272 | dest="client_ip", |
|
280 | dest="client_ip", | |
273 | help="the IP address or hostname the controller will listen on for client connections" |
|
281 | help="the IP address or hostname the controller will listen on for client connections" | |
274 | ) |
|
282 | ) | |
275 | parser.add_option( |
|
283 | parser.add_option( | |
276 | "--client-port", |
|
284 | "--client-port", | |
277 | type="int", |
|
285 | type="int", | |
278 | dest="client_port", |
|
286 | dest="client_port", | |
279 | help="the port the controller will listen on for client connections" |
|
287 | help="the port the controller will listen on for client connections" | |
280 | ) |
|
288 | ) | |
281 | parser.add_option( |
|
289 | parser.add_option( | |
282 | '--client-location', |
|
290 | '--client-location', | |
283 | type="string", |
|
291 | type="string", | |
284 | dest="client_location", |
|
292 | dest="client_location", | |
285 | help="hostname or ip for clients to connect to" |
|
293 | help="hostname or ip for clients to connect to" | |
286 | ) |
|
294 | ) | |
287 | parser.add_option( |
|
295 | parser.add_option( | |
288 | "-x", |
|
296 | "-x", | |
289 | action="store_false", |
|
297 | action="store_false", | |
290 | dest="client_secure", |
|
298 | dest="client_secure", | |
291 | help="turn off all client security" |
|
299 | help="turn off all client security" | |
292 | ) |
|
300 | ) | |
293 | parser.add_option( |
|
301 | parser.add_option( | |
294 | '--client-cert-file', |
|
302 | '--client-cert-file', | |
295 | type="string", |
|
303 | type="string", | |
296 | dest="client_cert_file", |
|
304 | dest="client_cert_file", | |
297 | help="file to store the client SSL certificate" |
|
305 | help="file to store the client SSL certificate" | |
298 | ) |
|
306 | ) | |
299 | parser.add_option( |
|
307 | parser.add_option( | |
300 | '--task-furl-file', |
|
308 | '--task-furl-file', | |
301 | type="string", |
|
309 | type="string", | |
302 | dest="task_furl_file", |
|
310 | dest="task_furl_file", | |
303 | help="file to store the FURL for task clients to connect with" |
|
311 | help="file to store the FURL for task clients to connect with" | |
304 | ) |
|
312 | ) | |
305 | parser.add_option( |
|
313 | parser.add_option( | |
306 | '--multiengine-furl-file', |
|
314 | '--multiengine-furl-file', | |
307 | type="string", |
|
315 | type="string", | |
308 | dest="multiengine_furl_file", |
|
316 | dest="multiengine_furl_file", | |
309 | help="file to store the FURL for multiengine clients to connect with" |
|
317 | help="file to store the FURL for multiengine clients to connect with" | |
310 | ) |
|
318 | ) | |
311 | # Engine related options |
|
319 | # Engine related options | |
312 | parser.add_option( |
|
320 | parser.add_option( | |
313 | "--engine-ip", |
|
321 | "--engine-ip", | |
314 | type="string", |
|
322 | type="string", | |
315 | dest="engine_ip", |
|
323 | dest="engine_ip", | |
316 | help="the IP address or hostname the controller will listen on for engine connections" |
|
324 | help="the IP address or hostname the controller will listen on for engine connections" | |
317 | ) |
|
325 | ) | |
318 | parser.add_option( |
|
326 | parser.add_option( | |
319 | "--engine-port", |
|
327 | "--engine-port", | |
320 | type="int", |
|
328 | type="int", | |
321 | dest="engine_port", |
|
329 | dest="engine_port", | |
322 | help="the port the controller will listen on for engine connections" |
|
330 | help="the port the controller will listen on for engine connections" | |
323 | ) |
|
331 | ) | |
324 | parser.add_option( |
|
332 | parser.add_option( | |
325 | '--engine-location', |
|
333 | '--engine-location', | |
326 | type="string", |
|
334 | type="string", | |
327 | dest="engine_location", |
|
335 | dest="engine_location", | |
328 | help="hostname or ip for engines to connect to" |
|
336 | help="hostname or ip for engines to connect to" | |
329 | ) |
|
337 | ) | |
330 | parser.add_option( |
|
338 | parser.add_option( | |
331 | "-y", |
|
339 | "-y", | |
332 | action="store_false", |
|
340 | action="store_false", | |
333 | dest="engine_secure", |
|
341 | dest="engine_secure", | |
334 | help="turn off all engine security" |
|
342 | help="turn off all engine security" | |
335 | ) |
|
343 | ) | |
336 | parser.add_option( |
|
344 | parser.add_option( | |
337 | '--engine-cert-file', |
|
345 | '--engine-cert-file', | |
338 | type="string", |
|
346 | type="string", | |
339 | dest="engine_cert_file", |
|
347 | dest="engine_cert_file", | |
340 | help="file to store the engine SSL certificate" |
|
348 | help="file to store the engine SSL certificate" | |
341 | ) |
|
349 | ) | |
342 | parser.add_option( |
|
350 | parser.add_option( | |
343 | '--engine-furl-file', |
|
351 | '--engine-furl-file', | |
344 | type="string", |
|
352 | type="string", | |
345 | dest="engine_furl_file", |
|
353 | dest="engine_furl_file", | |
346 | help="file to store the FURL for engines to connect with" |
|
354 | help="file to store the FURL for engines to connect with" | |
347 | ) |
|
355 | ) | |
348 | parser.add_option( |
|
356 | parser.add_option( | |
349 | "-l", "--logfile", |
|
357 | "-l", "--logfile", | |
350 | type="string", |
|
358 | type="string", | |
351 | dest="logfile", |
|
359 | dest="logfile", | |
352 | help="log file name (default is stdout)" |
|
360 | help="log file name (default is stdout)" | |
353 | ) |
|
361 | ) | |
354 | parser.add_option( |
|
362 | parser.add_option( | |
355 | "-r", |
|
363 | "-r", | |
356 | action="store_true", |
|
364 | action="store_true", | |
357 | dest="reuse_furls", |
|
365 | dest="reuse_furls", | |
358 | help="try to reuse all furl files" |
|
366 | help="try to reuse all furl files" | |
359 | ) |
|
367 | ) | |
360 |
|
368 | |||
361 | (options, args) = parser.parse_args() |
|
369 | (options, args) = parser.parse_args() | |
362 |
|
370 | |||
363 | config = kernel_config_manager.get_config_obj() |
|
371 | config = kernel_config_manager.get_config_obj() | |
364 |
|
372 | |||
365 | # Update with command line options |
|
373 | # Update with command line options | |
366 | if options.client_ip is not None: |
|
374 | if options.client_ip is not None: | |
367 | config['controller']['client_tub']['ip'] = options.client_ip |
|
375 | config['controller']['client_tub']['ip'] = options.client_ip | |
368 | if options.client_port is not None: |
|
376 | if options.client_port is not None: | |
369 | config['controller']['client_tub']['port'] = options.client_port |
|
377 | config['controller']['client_tub']['port'] = options.client_port | |
370 | if options.client_location is not None: |
|
378 | if options.client_location is not None: | |
371 | config['controller']['client_tub']['location'] = options.client_location |
|
379 | config['controller']['client_tub']['location'] = options.client_location | |
372 | if options.client_secure is not None: |
|
380 | if options.client_secure is not None: | |
373 | config['controller']['client_tub']['secure'] = options.client_secure |
|
381 | config['controller']['client_tub']['secure'] = options.client_secure | |
374 | if options.client_cert_file is not None: |
|
382 | if options.client_cert_file is not None: | |
375 | config['controller']['client_tub']['cert_file'] = options.client_cert_file |
|
383 | config['controller']['client_tub']['cert_file'] = options.client_cert_file | |
376 | if options.task_furl_file is not None: |
|
384 | if options.task_furl_file is not None: | |
377 | config['controller']['controller_interfaces']['task']['furl_file'] = options.task_furl_file |
|
385 | config['controller']['controller_interfaces']['task']['furl_file'] = options.task_furl_file | |
378 | if options.multiengine_furl_file is not None: |
|
386 | if options.multiengine_furl_file is not None: | |
379 | config['controller']['controller_interfaces']['multiengine']['furl_file'] = options.multiengine_furl_file |
|
387 | config['controller']['controller_interfaces']['multiengine']['furl_file'] = options.multiengine_furl_file | |
380 | if options.engine_ip is not None: |
|
388 | if options.engine_ip is not None: | |
381 | config['controller']['engine_tub']['ip'] = options.engine_ip |
|
389 | config['controller']['engine_tub']['ip'] = options.engine_ip | |
382 | if options.engine_port is not None: |
|
390 | if options.engine_port is not None: | |
383 | config['controller']['engine_tub']['port'] = options.engine_port |
|
391 | config['controller']['engine_tub']['port'] = options.engine_port | |
384 | if options.engine_location is not None: |
|
392 | if options.engine_location is not None: | |
385 | config['controller']['engine_tub']['location'] = options.engine_location |
|
393 | config['controller']['engine_tub']['location'] = options.engine_location | |
386 | if options.engine_secure is not None: |
|
394 | if options.engine_secure is not None: | |
387 | config['controller']['engine_tub']['secure'] = options.engine_secure |
|
395 | config['controller']['engine_tub']['secure'] = options.engine_secure | |
388 | if options.engine_cert_file is not None: |
|
396 | if options.engine_cert_file is not None: | |
389 | config['controller']['engine_tub']['cert_file'] = options.engine_cert_file |
|
397 | config['controller']['engine_tub']['cert_file'] = options.engine_cert_file | |
390 | if options.engine_furl_file is not None: |
|
398 | if options.engine_furl_file is not None: | |
391 | config['controller']['engine_furl_file'] = options.engine_furl_file |
|
399 | config['controller']['engine_furl_file'] = options.engine_furl_file | |
392 | if options.reuse_furls is not None: |
|
400 | if options.reuse_furls is not None: | |
393 | config['controller']['reuse_furls'] = options.reuse_furls |
|
401 | config['controller']['reuse_furls'] = options.reuse_furls | |
394 |
|
402 | |||
395 | if options.logfile is not None: |
|
403 | if options.logfile is not None: | |
396 | config['controller']['logfile'] = options.logfile |
|
404 | config['controller']['logfile'] = options.logfile | |
397 |
|
405 | |||
398 | kernel_config_manager.update_config_obj(config) |
|
406 | kernel_config_manager.update_config_obj(config) | |
399 |
|
407 | |||
400 | def main(): |
|
408 | def main(): | |
401 | """ |
|
409 | """ | |
402 | After creating the configuration information, start the controller. |
|
410 | After creating the configuration information, start the controller. | |
403 | """ |
|
411 | """ | |
404 | init_config() |
|
412 | init_config() | |
405 | start_controller() |
|
413 | start_controller() | |
406 |
|
414 | |||
407 | if __name__ == "__main__": |
|
415 | if __name__ == "__main__": | |
408 | main() |
|
416 | main() |
@@ -1,186 +1,193 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | """Start the IPython Engine.""" |
|
4 | """Start the IPython Engine.""" | |
5 |
|
5 | |||
6 | __docformat__ = "restructuredtext en" |
|
6 | __docformat__ = "restructuredtext en" | |
7 |
|
7 | |||
8 | #------------------------------------------------------------------------------- |
|
8 | #------------------------------------------------------------------------------- | |
9 | # Copyright (C) 2008 The IPython Development Team |
|
9 | # Copyright (C) 2008 The IPython Development Team | |
10 | # |
|
10 | # | |
11 | # Distributed under the terms of the BSD License. The full license is in |
|
11 | # Distributed under the terms of the BSD License. The full license is in | |
12 | # the file COPYING, distributed as part of this software. |
|
12 | # the file COPYING, distributed as part of this software. | |
13 | #------------------------------------------------------------------------------- |
|
13 | #------------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 | # Imports |
|
16 | # Imports | |
17 | #------------------------------------------------------------------------------- |
|
17 | #------------------------------------------------------------------------------- | |
18 |
|
18 | |||
19 | # Python looks for an empty string at the beginning of sys.path to enable |
|
19 | # Python looks for an empty string at the beginning of sys.path to enable | |
20 | # importing from the cwd. |
|
20 | # importing from the cwd. | |
21 | import sys |
|
21 | import sys | |
22 | sys.path.insert(0, '') |
|
22 | sys.path.insert(0, '') | |
23 |
|
23 | |||
24 | import sys, os |
|
|||
25 | from optparse import OptionParser |
|
24 | from optparse import OptionParser | |
|
25 | import os | |||
26 |
|
26 | |||
27 | from twisted.application import service |
|
27 | from twisted.application import service | |
28 | from twisted.internet import reactor |
|
28 | from twisted.internet import reactor | |
29 | from twisted.python import log |
|
29 | from twisted.python import log | |
30 |
|
30 | |||
31 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub |
|
31 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub | |
32 |
|
32 | |||
33 | from IPython.kernel.core.config import config_manager as core_config_manager |
|
33 | from IPython.kernel.core.config import config_manager as core_config_manager | |
34 | from IPython.config.cutils import import_item |
|
34 | from IPython.config.cutils import import_item | |
35 | from IPython.kernel.engineservice import EngineService |
|
35 | from IPython.kernel.engineservice import EngineService | |
36 |
|
36 | |||
37 | # Create various ipython directories if they don't exist. |
|
37 | # Create various ipython directories if they don't exist. | |
38 | # This must be done before IPython.kernel.config is imported. |
|
38 | # This must be done before IPython.kernel.config is imported. | |
39 | from IPython.iplib import user_setup |
|
39 | from IPython.iplib import user_setup | |
40 | from IPython.genutils import get_ipython_dir, get_log_dir, get_security_dir |
|
40 | from IPython.genutils import get_ipython_dir, get_log_dir, get_security_dir | |
41 | if os.name == 'posix': |
|
41 | if os.name == 'posix': | |
42 | rc_suffix = '' |
|
42 | rc_suffix = '' | |
43 | else: |
|
43 | else: | |
44 | rc_suffix = '.ini' |
|
44 | rc_suffix = '.ini' | |
45 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) |
|
45 | user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False) | |
46 | get_log_dir() |
|
46 | get_log_dir() | |
47 | get_security_dir() |
|
47 | get_security_dir() | |
48 |
|
48 | |||
49 | from IPython.kernel.config import config_manager as kernel_config_manager |
|
49 | from IPython.kernel.config import config_manager as kernel_config_manager | |
50 | from IPython.kernel.engineconnector import EngineConnector |
|
50 | from IPython.kernel.engineconnector import EngineConnector | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | #------------------------------------------------------------------------------- |
|
53 | #------------------------------------------------------------------------------- | |
54 | # Code |
|
54 | # Code | |
55 | #------------------------------------------------------------------------------- |
|
55 | #------------------------------------------------------------------------------- | |
56 |
|
56 | |||
57 | def start_engine(): |
|
57 | def start_engine(): | |
58 | """ |
|
58 | """ | |
59 | Start the engine by creating it and starting the Twisted reactor. |
|
59 | Start the engine by creating it and starting the Twisted reactor. | |
60 |
|
60 | |||
61 | This method does: |
|
61 | This method does: | |
62 |
|
62 | |||
63 | * If it exists, runs the `mpi_import_statement` to call `MPI_Init` |
|
63 | * If it exists, runs the `mpi_import_statement` to call `MPI_Init` | |
64 | * Starts the engine logging |
|
64 | * Starts the engine logging | |
65 | * Creates an IPython shell and wraps it in an `EngineService` |
|
65 | * Creates an IPython shell and wraps it in an `EngineService` | |
66 | * Creates a `foolscap.Tub` to use in connecting to a controller. |
|
66 | * Creates a `foolscap.Tub` to use in connecting to a controller. | |
67 | * Uses the tub and the `EngineService` along with a Foolscap URL |
|
67 | * Uses the tub and the `EngineService` along with a Foolscap URL | |
68 | (or FURL) to connect to the controller and register the engine |
|
68 | (or FURL) to connect to the controller and register the engine | |
69 | with the controller |
|
69 | with the controller | |
70 | """ |
|
70 | """ | |
71 | kernel_config = kernel_config_manager.get_config_obj() |
|
71 | kernel_config = kernel_config_manager.get_config_obj() | |
72 | core_config = core_config_manager.get_config_obj() |
|
72 | core_config = core_config_manager.get_config_obj() | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | # Execute the mpi import statement that needs to call MPI_Init |
|
75 | # Execute the mpi import statement that needs to call MPI_Init | |
76 | global mpi |
|
76 | global mpi | |
77 | mpikey = kernel_config['mpi']['default'] |
|
77 | mpikey = kernel_config['mpi']['default'] | |
78 | mpi_import_statement = kernel_config['mpi'].get(mpikey, None) |
|
78 | mpi_import_statement = kernel_config['mpi'].get(mpikey, None) | |
79 | if mpi_import_statement is not None: |
|
79 | if mpi_import_statement is not None: | |
80 | try: |
|
80 | try: | |
81 | exec mpi_import_statement in globals() |
|
81 | exec mpi_import_statement in globals() | |
82 | except: |
|
82 | except: | |
83 | mpi = None |
|
83 | mpi = None | |
84 | else: |
|
84 | else: | |
85 | mpi = None |
|
85 | mpi = None | |
86 |
|
86 | |||
87 | # Start logging |
|
87 | # Start logging | |
88 | logfile = kernel_config['engine']['logfile'] |
|
88 | logfile = kernel_config['engine']['logfile'] | |
89 | if logfile: |
|
89 | if logfile: | |
90 | logfile = logfile + str(os.getpid()) + '.log' |
|
90 | logfile = logfile + str(os.getpid()) + '.log' | |
91 | try: |
|
91 | try: | |
92 | openLogFile = open(logfile, 'w') |
|
92 | openLogFile = open(logfile, 'w') | |
93 | except: |
|
93 | except: | |
94 | openLogFile = sys.stdout |
|
94 | openLogFile = sys.stdout | |
95 | else: |
|
95 | else: | |
96 | openLogFile = sys.stdout |
|
96 | openLogFile = sys.stdout | |
97 | log.startLogging(openLogFile) |
|
97 | log.startLogging(openLogFile) | |
98 |
|
98 | |||
99 | # Create the underlying shell class and EngineService |
|
99 | # Create the underlying shell class and EngineService | |
100 | shell_class = import_item(core_config['shell']['shell_class']) |
|
100 | shell_class = import_item(core_config['shell']['shell_class']) | |
101 | engine_service = EngineService(shell_class, mpi=mpi) |
|
101 | engine_service = EngineService(shell_class, mpi=mpi) | |
102 | shell_import_statement = core_config['shell']['import_statement'] |
|
102 | shell_import_statement = core_config['shell']['import_statement'] | |
103 | if shell_import_statement: |
|
103 | if shell_import_statement: | |
104 | try: |
|
104 | try: | |
105 | engine_service.execute(shell_import_statement) |
|
105 | engine_service.execute(shell_import_statement) | |
106 | except: |
|
106 | except: | |
107 | log.msg("Error running import_statement: %s" % shell_import_statement) |
|
107 | log.msg("Error running import_statement: %s" % shell_import_statement) | |
108 |
|
108 | |||
109 | # Create the service hierarchy |
|
109 | # Create the service hierarchy | |
110 | main_service = service.MultiService() |
|
110 | main_service = service.MultiService() | |
111 | engine_service.setServiceParent(main_service) |
|
111 | engine_service.setServiceParent(main_service) | |
112 | tub_service = Tub() |
|
112 | tub_service = Tub() | |
113 | tub_service.setServiceParent(main_service) |
|
113 | tub_service.setServiceParent(main_service) | |
114 | # This needs to be called before the connection is initiated |
|
114 | # This needs to be called before the connection is initiated | |
115 | main_service.startService() |
|
115 | main_service.startService() | |
116 |
|
116 | |||
117 | # This initiates the connection to the controller and calls |
|
117 | # This initiates the connection to the controller and calls | |
118 | # register_engine to tell the controller we are ready to do work |
|
118 | # register_engine to tell the controller we are ready to do work | |
119 | engine_connector = EngineConnector(tub_service) |
|
119 | engine_connector = EngineConnector(tub_service) | |
120 | furl_file = kernel_config['engine']['furl_file'] |
|
120 | furl_file = kernel_config['engine']['furl_file'] | |
121 | log.msg("Using furl file: %s" % furl_file) |
|
121 | log.msg("Using furl file: %s" % furl_file) | |
122 |
|
122 | |||
123 | def call_connect(engine_service, furl_file): |
|
123 | def call_connect(engine_service, furl_file): | |
124 | d = engine_connector.connect_to_controller(engine_service, furl_file) |
|
124 | d = engine_connector.connect_to_controller(engine_service, furl_file) | |
125 | def handle_error(f): |
|
125 | def handle_error(f): | |
126 | # If this print statement is replaced by a log.err(f) I get |
|
126 | # If this print statement is replaced by a log.err(f) I get | |
127 | # an unhandled error, which makes no sense. I shouldn't have |
|
127 | # an unhandled error, which makes no sense. I shouldn't have | |
128 | # to use a print statement here. My only thought is that |
|
128 | # to use a print statement here. My only thought is that | |
129 | # at the beginning of the process the logging is still starting up |
|
129 | # at the beginning of the process the logging is still starting up | |
130 | print "error connecting to controller:", f.getErrorMessage() |
|
130 | print "error connecting to controller:", f.getErrorMessage() | |
131 | reactor.callLater(0.1, reactor.stop) |
|
131 | reactor.callLater(0.1, reactor.stop) | |
132 | d.addErrback(handle_error) |
|
132 | d.addErrback(handle_error) | |
133 |
|
133 | |||
134 | reactor.callWhenRunning(call_connect, engine_service, furl_file) |
|
134 | reactor.callWhenRunning(call_connect, engine_service, furl_file) | |
135 | reactor.run() |
|
135 | reactor.run() | |
136 |
|
136 | |||
137 |
|
137 | |||
138 | def init_config(): |
|
138 | def init_config(): | |
139 | """ |
|
139 | """ | |
140 | Initialize the configuration using default and command line options. |
|
140 | Initialize the configuration using default and command line options. | |
141 | """ |
|
141 | """ | |
142 |
|
142 | |||
143 |
parser = OptionParser( |
|
143 | parser = OptionParser("""ipengine [options] | |
|
144 | ||||
|
145 | Start an IPython engine. | |||
|
146 | ||||
|
147 | Use the IPYTHONDIR environment variable to change your IPython directory | |||
|
148 | from the default of .ipython or _ipython. The log and security | |||
|
149 | subdirectories of your IPython directory will be used by this script | |||
|
150 | for log files and security files.""") | |||
144 |
|
151 | |||
145 | parser.add_option( |
|
152 | parser.add_option( | |
146 | "--furl-file", |
|
153 | "--furl-file", | |
147 | type="string", |
|
154 | type="string", | |
148 | dest="furl_file", |
|
155 | dest="furl_file", | |
149 | help="The filename containing the FURL of the controller" |
|
156 | help="The filename containing the FURL of the controller" | |
150 | ) |
|
157 | ) | |
151 | parser.add_option( |
|
158 | parser.add_option( | |
152 | "--mpi", |
|
159 | "--mpi", | |
153 | type="string", |
|
160 | type="string", | |
154 | dest="mpi", |
|
161 | dest="mpi", | |
155 | help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)" |
|
162 | help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)" | |
156 | ) |
|
163 | ) | |
157 | parser.add_option( |
|
164 | parser.add_option( | |
158 | "-l", |
|
165 | "-l", | |
159 | "--logfile", |
|
166 | "--logfile", | |
160 | type="string", |
|
167 | type="string", | |
161 | dest="logfile", |
|
168 | dest="logfile", | |
162 | help="log file name (default is stdout)" |
|
169 | help="log file name (default is stdout)" | |
163 | ) |
|
170 | ) | |
164 |
|
171 | |||
165 | (options, args) = parser.parse_args() |
|
172 | (options, args) = parser.parse_args() | |
166 |
|
173 | |||
167 | kernel_config = kernel_config_manager.get_config_obj() |
|
174 | kernel_config = kernel_config_manager.get_config_obj() | |
168 | # Now override with command line options |
|
175 | # Now override with command line options | |
169 | if options.furl_file is not None: |
|
176 | if options.furl_file is not None: | |
170 | kernel_config['engine']['furl_file'] = options.furl_file |
|
177 | kernel_config['engine']['furl_file'] = options.furl_file | |
171 | if options.logfile is not None: |
|
178 | if options.logfile is not None: | |
172 | kernel_config['engine']['logfile'] = options.logfile |
|
179 | kernel_config['engine']['logfile'] = options.logfile | |
173 | if options.mpi is not None: |
|
180 | if options.mpi is not None: | |
174 | kernel_config['mpi']['default'] = options.mpi |
|
181 | kernel_config['mpi']['default'] = options.mpi | |
175 |
|
182 | |||
176 |
|
183 | |||
177 | def main(): |
|
184 | def main(): | |
178 | """ |
|
185 | """ | |
179 | After creating the configuration information, start the engine. |
|
186 | After creating the configuration information, start the engine. | |
180 | """ |
|
187 | """ | |
181 | init_config() |
|
188 | init_config() | |
182 | start_engine() |
|
189 | start_engine() | |
183 |
|
190 | |||
184 |
|
191 | |||
185 | if __name__ == "__main__": |
|
192 | if __name__ == "__main__": | |
186 | main() |
|
193 | main() |
@@ -1,43 +1,46 b'' | |||||
|
1 | # Tell nose to skip this module | |||
|
2 | __test__ = {} | |||
|
3 | ||||
1 | #from __future__ import with_statement |
|
4 | #from __future__ import with_statement | |
2 |
|
5 | |||
3 | # XXX This file is currently disabled to preserve 2.4 compatibility. |
|
6 | # XXX This file is currently disabled to preserve 2.4 compatibility. | |
4 |
|
7 | |||
5 | #def test_simple(): |
|
8 | #def test_simple(): | |
6 | if 0: |
|
9 | if 0: | |
7 |
|
10 | |||
8 | # XXX - for now, we need a running cluster to be started separately. The |
|
11 | # XXX - for now, we need a running cluster to be started separately. The | |
9 | # daemon work is almost finished, and will make much of this unnecessary. |
|
12 | # daemon work is almost finished, and will make much of this unnecessary. | |
10 | from IPython.kernel import client |
|
13 | from IPython.kernel import client | |
11 | mec = client.MultiEngineClient(('127.0.0.1',10105)) |
|
14 | mec = client.MultiEngineClient(('127.0.0.1',10105)) | |
12 |
|
15 | |||
13 | try: |
|
16 | try: | |
14 | mec.get_ids() |
|
17 | mec.get_ids() | |
15 | except ConnectionRefusedError: |
|
18 | except ConnectionRefusedError: | |
16 | import os, time |
|
19 | import os, time | |
17 | os.system('ipcluster -n 2 &') |
|
20 | os.system('ipcluster -n 2 &') | |
18 | time.sleep(2) |
|
21 | time.sleep(2) | |
19 | mec = client.MultiEngineClient(('127.0.0.1',10105)) |
|
22 | mec = client.MultiEngineClient(('127.0.0.1',10105)) | |
20 |
|
23 | |||
21 | mec.block = False |
|
24 | mec.block = False | |
22 |
|
25 | |||
23 | import itertools |
|
26 | import itertools | |
24 | c = itertools.count() |
|
27 | c = itertools.count() | |
25 |
|
28 | |||
26 | parallel = RemoteMultiEngine(mec) |
|
29 | parallel = RemoteMultiEngine(mec) | |
27 |
|
30 | |||
28 | mec.pushAll() |
|
31 | mec.pushAll() | |
29 |
|
32 | |||
30 | ## with parallel as pr: |
|
33 | ## with parallel as pr: | |
31 | ## # A comment |
|
34 | ## # A comment | |
32 | ## remote() # this means the code below only runs remotely |
|
35 | ## remote() # this means the code below only runs remotely | |
33 | ## print 'Hello remote world' |
|
36 | ## print 'Hello remote world' | |
34 | ## x = range(10) |
|
37 | ## x = range(10) | |
35 | ## # Comments are OK |
|
38 | ## # Comments are OK | |
36 | ## # Even misindented. |
|
39 | ## # Even misindented. | |
37 | ## y = x+1 |
|
40 | ## y = x+1 | |
38 |
|
41 | |||
39 |
|
42 | |||
40 | ## with pfor('i',sequence) as pr: |
|
43 | ## with pfor('i',sequence) as pr: | |
41 | ## print x[i] |
|
44 | ## print x[i] | |
42 |
|
45 | |||
43 | print pr.x + pr.y |
|
46 | print pr.x + pr.y |
@@ -1,44 +1,44 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """This file contains unittests for the kernel.engineservice.py module. |
|
3 | """This file contains unittests for the kernel.engineservice.py module. | |
4 |
|
4 | |||
5 | Things that should be tested: |
|
5 | Things that should be tested: | |
6 |
|
6 | |||
7 | - Should the EngineService return Deferred objects? |
|
7 | - Should the EngineService return Deferred objects? | |
8 | - Run the same tests that are run in shell.py. |
|
8 | - Run the same tests that are run in shell.py. | |
9 | - Make sure that the Interface is really implemented. |
|
9 | - Make sure that the Interface is really implemented. | |
10 | - The startService and stopService methods. |
|
10 | - The startService and stopService methods. | |
11 | """ |
|
11 | """ | |
12 |
|
12 | |||
13 | __docformat__ = "restructuredtext en" |
|
13 | __docformat__ = "restructuredtext en" | |
14 |
|
14 | |||
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 | # Copyright (C) 2008 The IPython Development Team |
|
16 | # Copyright (C) 2008 The IPython Development Team | |
17 | # |
|
17 | # | |
18 | # Distributed under the terms of the BSD License. The full license is in |
|
18 | # Distributed under the terms of the BSD License. The full license is in | |
19 | # the file COPYING, distributed as part of this software. |
|
19 | # the file COPYING, distributed as part of this software. | |
20 | #------------------------------------------------------------------------------- |
|
20 | #------------------------------------------------------------------------------- | |
21 |
|
21 | |||
22 | #------------------------------------------------------------------------------- |
|
22 | #------------------------------------------------------------------------------- | |
23 | # Imports |
|
23 | # Imports | |
24 | #------------------------------------------------------------------------------- |
|
24 | #------------------------------------------------------------------------------- | |
25 |
|
25 | |||
26 | try: |
|
26 | # Tell nose to skip this module | |
27 | from twisted.application.service import IService |
|
27 | __test__ = {} | |
28 | from IPython.kernel.controllerservice import ControllerService |
|
28 | ||
29 | from IPython.kernel.tests import multienginetest as met |
|
29 | from twisted.application.service import IService | |
30 |
|
|
30 | from IPython.kernel.controllerservice import ControllerService | |
31 |
|
|
31 | from IPython.kernel.tests import multienginetest as met | |
32 | except ImportError: |
|
32 | from controllertest import IControllerCoreTestCase | |
33 | import nose |
|
33 | from IPython.testing.util import DeferredTestCase | |
34 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
34 | ||
35 |
|
35 | |||
36 | class BasicControllerServiceTest(DeferredTestCase, |
|
36 | class BasicControllerServiceTest(DeferredTestCase, | |
37 | IControllerCoreTestCase): |
|
37 | IControllerCoreTestCase): | |
38 |
|
38 | |||
39 | def setUp(self): |
|
39 | def setUp(self): | |
40 | self.controller = ControllerService() |
|
40 | self.controller = ControllerService() | |
41 | self.controller.startService() |
|
41 | self.controller.startService() | |
42 |
|
42 | |||
43 | def tearDown(self): |
|
43 | def tearDown(self): | |
44 | self.controller.stopService() |
|
44 | self.controller.stopService() |
@@ -1,93 +1,92 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """This file contains unittests for the enginepb.py module.""" |
|
3 | """This file contains unittests for the enginepb.py module.""" | |
4 |
|
4 | |||
5 | __docformat__ = "restructuredtext en" |
|
5 | __docformat__ = "restructuredtext en" | |
6 |
|
6 | |||
7 | #------------------------------------------------------------------------------- |
|
7 | #------------------------------------------------------------------------------- | |
8 | # Copyright (C) 2008 The IPython Development Team |
|
8 | # Copyright (C) 2008 The IPython Development Team | |
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
11 | # the file COPYING, distributed as part of this software. | |
12 | #------------------------------------------------------------------------------- |
|
12 | #------------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 | #------------------------------------------------------------------------------- |
|
14 | #------------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #------------------------------------------------------------------------------- |
|
16 | #------------------------------------------------------------------------------- | |
17 |
|
17 | |||
18 | try: |
|
18 | # Tell nose to skip this module | |
19 | from twisted.python import components |
|
19 | __test__ = {} | |
20 | from twisted.internet import reactor, defer |
|
|||
21 | from twisted.spread import pb |
|
|||
22 | from twisted.internet.base import DelayedCall |
|
|||
23 | DelayedCall.debug = True |
|
|||
24 |
|
20 | |||
25 | import zope.interface as zi |
|
21 | from twisted.python import components | |
|
22 | from twisted.internet import reactor, defer | |||
|
23 | from twisted.spread import pb | |||
|
24 | from twisted.internet.base import DelayedCall | |||
|
25 | DelayedCall.debug = True | |||
26 |
|
26 | |||
27 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub |
|
27 | import zope.interface as zi | |
28 | from IPython.kernel import engineservice as es |
|
28 | ||
29 | from IPython.testing.util import DeferredTestCase |
|
29 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub | |
30 |
|
|
30 | from IPython.kernel import engineservice as es | |
31 | from IPython.kernel.enginefc import FCRemoteEngineRefFromService, IEngineBase |
|
31 | from IPython.testing.util import DeferredTestCase | |
32 |
|
|
32 | from IPython.kernel.controllerservice import IControllerBase | |
33 |
|
|
33 | from IPython.kernel.enginefc import FCRemoteEngineRefFromService, IEngineBase | |
34 |
|
34 | from IPython.kernel.engineservice import IEngineQueued | ||
35 |
|
|
35 | from IPython.kernel.engineconnector import EngineConnector | |
36 | IEngineCoreTestCase, \ |
|
36 | ||
37 | IEngineSerializedTestCase, \ |
|
37 | from IPython.kernel.tests.engineservicetest import \ | |
38 |
|
|
38 | IEngineCoreTestCase, \ | |
39 | except ImportError: |
|
39 | IEngineSerializedTestCase, \ | |
40 | import nose |
|
40 | IEngineQueuedTestCase | |
41 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
|||
42 |
|
41 | |||
43 |
|
42 | |||
44 | class EngineFCTest(DeferredTestCase, |
|
43 | class EngineFCTest(DeferredTestCase, | |
45 | IEngineCoreTestCase, |
|
44 | IEngineCoreTestCase, | |
46 | IEngineSerializedTestCase, |
|
45 | IEngineSerializedTestCase, | |
47 | IEngineQueuedTestCase |
|
46 | IEngineQueuedTestCase | |
48 | ): |
|
47 | ): | |
49 |
|
48 | |||
50 | zi.implements(IControllerBase) |
|
49 | zi.implements(IControllerBase) | |
51 |
|
50 | |||
52 | def setUp(self): |
|
51 | def setUp(self): | |
53 |
|
52 | |||
54 | # Start a server and append to self.servers |
|
53 | # Start a server and append to self.servers | |
55 | self.controller_reference = FCRemoteEngineRefFromService(self) |
|
54 | self.controller_reference = FCRemoteEngineRefFromService(self) | |
56 | self.controller_tub = Tub() |
|
55 | self.controller_tub = Tub() | |
57 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') |
|
56 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') | |
58 | self.controller_tub.setLocation('127.0.0.1:10105') |
|
57 | self.controller_tub.setLocation('127.0.0.1:10105') | |
59 |
|
58 | |||
60 | furl = self.controller_tub.registerReference(self.controller_reference) |
|
59 | furl = self.controller_tub.registerReference(self.controller_reference) | |
61 | self.controller_tub.startService() |
|
60 | self.controller_tub.startService() | |
62 |
|
61 | |||
63 | # Start an EngineService and append to services/client |
|
62 | # Start an EngineService and append to services/client | |
64 | self.engine_service = es.EngineService() |
|
63 | self.engine_service = es.EngineService() | |
65 | self.engine_service.startService() |
|
64 | self.engine_service.startService() | |
66 | self.engine_tub = Tub() |
|
65 | self.engine_tub = Tub() | |
67 | self.engine_tub.startService() |
|
66 | self.engine_tub.startService() | |
68 | engine_connector = EngineConnector(self.engine_tub) |
|
67 | engine_connector = EngineConnector(self.engine_tub) | |
69 | d = engine_connector.connect_to_controller(self.engine_service, furl) |
|
68 | d = engine_connector.connect_to_controller(self.engine_service, furl) | |
70 | # This deferred doesn't fire until after register_engine has returned and |
|
69 | # This deferred doesn't fire until after register_engine has returned and | |
71 | # thus, self.engine has been defined and the tests can proceed. |
|
70 | # thus, self.engine has been defined and the tests can proceed. | |
72 | return d |
|
71 | return d | |
73 |
|
72 | |||
74 | def tearDown(self): |
|
73 | def tearDown(self): | |
75 | dlist = [] |
|
74 | dlist = [] | |
76 | # Shut down the engine |
|
75 | # Shut down the engine | |
77 | d = self.engine_tub.stopService() |
|
76 | d = self.engine_tub.stopService() | |
78 | dlist.append(d) |
|
77 | dlist.append(d) | |
79 | # Shut down the controller |
|
78 | # Shut down the controller | |
80 | d = self.controller_tub.stopService() |
|
79 | d = self.controller_tub.stopService() | |
81 | dlist.append(d) |
|
80 | dlist.append(d) | |
82 | return defer.DeferredList(dlist) |
|
81 | return defer.DeferredList(dlist) | |
83 |
|
82 | |||
84 | #--------------------------------------------------------------------------- |
|
83 | #--------------------------------------------------------------------------- | |
85 | # Make me look like a basic controller |
|
84 | # Make me look like a basic controller | |
86 | #--------------------------------------------------------------------------- |
|
85 | #--------------------------------------------------------------------------- | |
87 |
|
86 | |||
88 | def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None): |
|
87 | def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None): | |
89 | self.engine = IEngineQueued(IEngineBase(engine_ref)) |
|
88 | self.engine = IEngineQueued(IEngineBase(engine_ref)) | |
90 | return {'id':id} |
|
89 | return {'id':id} | |
91 |
|
90 | |||
92 | def unregister_engine(self, id): |
|
91 | def unregister_engine(self, id): | |
93 | pass No newline at end of file |
|
92 | pass |
@@ -1,80 +1,79 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """This file contains unittests for the kernel.engineservice.py module. |
|
3 | """This file contains unittests for the kernel.engineservice.py module. | |
4 |
|
4 | |||
5 | Things that should be tested: |
|
5 | Things that should be tested: | |
6 |
|
6 | |||
7 | - Should the EngineService return Deferred objects? |
|
7 | - Should the EngineService return Deferred objects? | |
8 | - Run the same tests that are run in shell.py. |
|
8 | - Run the same tests that are run in shell.py. | |
9 | - Make sure that the Interface is really implemented. |
|
9 | - Make sure that the Interface is really implemented. | |
10 | - The startService and stopService methods. |
|
10 | - The startService and stopService methods. | |
11 | """ |
|
11 | """ | |
12 |
|
12 | |||
13 | __docformat__ = "restructuredtext en" |
|
13 | __docformat__ = "restructuredtext en" | |
14 |
|
14 | |||
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 | # Copyright (C) 2008 The IPython Development Team |
|
16 | # Copyright (C) 2008 The IPython Development Team | |
17 | # |
|
17 | # | |
18 | # Distributed under the terms of the BSD License. The full license is in |
|
18 | # Distributed under the terms of the BSD License. The full license is in | |
19 | # the file COPYING, distributed as part of this software. |
|
19 | # the file COPYING, distributed as part of this software. | |
20 | #------------------------------------------------------------------------------- |
|
20 | #------------------------------------------------------------------------------- | |
21 |
|
21 | |||
22 | #------------------------------------------------------------------------------- |
|
22 | #------------------------------------------------------------------------------- | |
23 | # Imports |
|
23 | # Imports | |
24 | #------------------------------------------------------------------------------- |
|
24 | #------------------------------------------------------------------------------- | |
25 |
|
25 | |||
26 | try: |
|
26 | # Tell nose to skip this module | |
27 | from twisted.internet import defer |
|
27 | __test__ = {} | |
28 | from twisted.application.service import IService |
|
28 | ||
29 |
|
29 | from twisted.internet import defer | ||
30 | from IPython.kernel import engineservice as es |
|
30 | from twisted.application.service import IService | |
31 | from IPython.testing.util import DeferredTestCase |
|
31 | ||
32 |
|
|
32 | from IPython.kernel import engineservice as es | |
33 | IEngineCoreTestCase, \ |
|
33 | from IPython.testing.util import DeferredTestCase | |
34 | IEngineSerializedTestCase, \ |
|
34 | from IPython.kernel.tests.engineservicetest import \ | |
35 |
|
|
35 | IEngineCoreTestCase, \ | |
36 |
|
|
36 | IEngineSerializedTestCase, \ | |
37 | except ImportError: |
|
37 | IEngineQueuedTestCase, \ | |
38 | import nose |
|
38 | IEnginePropertiesTestCase | |
39 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
|||
40 |
|
39 | |||
41 |
|
40 | |||
42 | class BasicEngineServiceTest(DeferredTestCase, |
|
41 | class BasicEngineServiceTest(DeferredTestCase, | |
43 | IEngineCoreTestCase, |
|
42 | IEngineCoreTestCase, | |
44 | IEngineSerializedTestCase, |
|
43 | IEngineSerializedTestCase, | |
45 | IEnginePropertiesTestCase): |
|
44 | IEnginePropertiesTestCase): | |
46 |
|
45 | |||
47 | def setUp(self): |
|
46 | def setUp(self): | |
48 | self.engine = es.EngineService() |
|
47 | self.engine = es.EngineService() | |
49 | self.engine.startService() |
|
48 | self.engine.startService() | |
50 |
|
49 | |||
51 | def tearDown(self): |
|
50 | def tearDown(self): | |
52 | return self.engine.stopService() |
|
51 | return self.engine.stopService() | |
53 |
|
52 | |||
54 | class ThreadedEngineServiceTest(DeferredTestCase, |
|
53 | class ThreadedEngineServiceTest(DeferredTestCase, | |
55 | IEngineCoreTestCase, |
|
54 | IEngineCoreTestCase, | |
56 | IEngineSerializedTestCase, |
|
55 | IEngineSerializedTestCase, | |
57 | IEnginePropertiesTestCase): |
|
56 | IEnginePropertiesTestCase): | |
58 |
|
57 | |||
59 | def setUp(self): |
|
58 | def setUp(self): | |
60 | self.engine = es.ThreadedEngineService() |
|
59 | self.engine = es.ThreadedEngineService() | |
61 | self.engine.startService() |
|
60 | self.engine.startService() | |
62 |
|
61 | |||
63 | def tearDown(self): |
|
62 | def tearDown(self): | |
64 | return self.engine.stopService() |
|
63 | return self.engine.stopService() | |
65 |
|
64 | |||
66 | class QueuedEngineServiceTest(DeferredTestCase, |
|
65 | class QueuedEngineServiceTest(DeferredTestCase, | |
67 | IEngineCoreTestCase, |
|
66 | IEngineCoreTestCase, | |
68 | IEngineSerializedTestCase, |
|
67 | IEngineSerializedTestCase, | |
69 | IEnginePropertiesTestCase, |
|
68 | IEnginePropertiesTestCase, | |
70 | IEngineQueuedTestCase): |
|
69 | IEngineQueuedTestCase): | |
71 |
|
70 | |||
72 | def setUp(self): |
|
71 | def setUp(self): | |
73 | self.rawEngine = es.EngineService() |
|
72 | self.rawEngine = es.EngineService() | |
74 | self.rawEngine.startService() |
|
73 | self.rawEngine.startService() | |
75 | self.engine = es.IEngineQueued(self.rawEngine) |
|
74 | self.engine = es.IEngineQueued(self.rawEngine) | |
76 |
|
75 | |||
77 | def tearDown(self): |
|
76 | def tearDown(self): | |
78 | return self.rawEngine.stopService() |
|
77 | return self.rawEngine.stopService() | |
79 |
|
78 | |||
80 |
|
79 |
@@ -1,56 +1,55 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """""" |
|
3 | """""" | |
4 |
|
4 | |||
5 | __docformat__ = "restructuredtext en" |
|
5 | __docformat__ = "restructuredtext en" | |
6 |
|
6 | |||
7 | #----------------------------------------------------------------------------- |
|
7 | #----------------------------------------------------------------------------- | |
8 | # Copyright (C) 2008 The IPython Development Team |
|
8 | # Copyright (C) 2008 The IPython Development Team | |
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
11 | # the file COPYING, distributed as part of this software. | |
12 | #----------------------------------------------------------------------------- |
|
12 | #----------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
17 |
|
17 | |||
18 | try: |
|
18 | # Tell nose to skip this module | |
19 | from twisted.internet import defer |
|
19 | __test__ = {} | |
20 | from IPython.testing.util import DeferredTestCase |
|
20 | ||
21 | from IPython.kernel.controllerservice import ControllerService |
|
21 | from twisted.internet import defer | |
22 | from IPython.kernel import multiengine as me |
|
22 | from IPython.testing.util import DeferredTestCase | |
23 | from IPython.kernel.tests.multienginetest import (IMultiEngineTestCase, |
|
23 | from IPython.kernel.controllerservice import ControllerService | |
24 | ISynchronousMultiEngineTestCase) |
|
24 | from IPython.kernel import multiengine as me | |
25 | except ImportError: |
|
25 | from IPython.kernel.tests.multienginetest import (IMultiEngineTestCase, | |
26 | import nose |
|
26 | ISynchronousMultiEngineTestCase) | |
27 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
27 | ||
28 |
|
28 | |||
29 |
|
||||
30 | class BasicMultiEngineTestCase(DeferredTestCase, IMultiEngineTestCase): |
|
29 | class BasicMultiEngineTestCase(DeferredTestCase, IMultiEngineTestCase): | |
31 |
|
30 | |||
32 | def setUp(self): |
|
31 | def setUp(self): | |
33 | self.controller = ControllerService() |
|
32 | self.controller = ControllerService() | |
34 | self.controller.startService() |
|
33 | self.controller.startService() | |
35 | self.multiengine = me.IMultiEngine(self.controller) |
|
34 | self.multiengine = me.IMultiEngine(self.controller) | |
36 | self.engines = [] |
|
35 | self.engines = [] | |
37 |
|
36 | |||
38 | def tearDown(self): |
|
37 | def tearDown(self): | |
39 | self.controller.stopService() |
|
38 | self.controller.stopService() | |
40 | for e in self.engines: |
|
39 | for e in self.engines: | |
41 | e.stopService() |
|
40 | e.stopService() | |
42 |
|
41 | |||
43 |
|
42 | |||
44 | class SynchronousMultiEngineTestCase(DeferredTestCase, ISynchronousMultiEngineTestCase): |
|
43 | class SynchronousMultiEngineTestCase(DeferredTestCase, ISynchronousMultiEngineTestCase): | |
45 |
|
44 | |||
46 | def setUp(self): |
|
45 | def setUp(self): | |
47 | self.controller = ControllerService() |
|
46 | self.controller = ControllerService() | |
48 | self.controller.startService() |
|
47 | self.controller.startService() | |
49 | self.multiengine = me.ISynchronousMultiEngine(me.IMultiEngine(self.controller)) |
|
48 | self.multiengine = me.ISynchronousMultiEngine(me.IMultiEngine(self.controller)) | |
50 | self.engines = [] |
|
49 | self.engines = [] | |
51 |
|
50 | |||
52 | def tearDown(self): |
|
51 | def tearDown(self): | |
53 | self.controller.stopService() |
|
52 | self.controller.stopService() | |
54 | for e in self.engines: |
|
53 | for e in self.engines: | |
55 | e.stopService() |
|
54 | e.stopService() | |
56 |
|
55 |
@@ -1,144 +1,144 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | __docformat__ = "restructuredtext en" |
|
4 | __docformat__ = "restructuredtext en" | |
5 |
|
5 | |||
6 | #------------------------------------------------------------------------------- |
|
6 | #------------------------------------------------------------------------------- | |
7 | # Copyright (C) 2008 The IPython Development Team |
|
7 | # Copyright (C) 2008 The IPython Development Team | |
8 | # |
|
8 | # | |
9 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
10 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
11 | #------------------------------------------------------------------------------- |
|
11 | #------------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #------------------------------------------------------------------------------- |
|
13 | #------------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | try: |
|
17 | # Tell nose to skip this module | |
18 | from twisted.internet import defer, reactor |
|
18 | __test__ = {} | |
|
19 | ||||
|
20 | from twisted.internet import defer, reactor | |||
|
21 | ||||
|
22 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub | |||
|
23 | ||||
|
24 | from IPython.testing.util import DeferredTestCase | |||
|
25 | from IPython.kernel.controllerservice import ControllerService | |||
|
26 | from IPython.kernel.multiengine import IMultiEngine | |||
|
27 | from IPython.kernel.tests.multienginetest import IFullSynchronousMultiEngineTestCase | |||
|
28 | from IPython.kernel.multienginefc import IFCSynchronousMultiEngine | |||
|
29 | from IPython.kernel import multiengine as me | |||
|
30 | from IPython.kernel.clientconnector import ClientConnector | |||
|
31 | from IPython.kernel.parallelfunction import ParallelFunction | |||
|
32 | from IPython.kernel.error import CompositeError | |||
|
33 | from IPython.kernel.util import printer | |||
19 |
|
34 | |||
20 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub |
|
|||
21 |
|
35 | |||
22 | from IPython.testing.util import DeferredTestCase |
|
|||
23 | from IPython.kernel.controllerservice import ControllerService |
|
|||
24 | from IPython.kernel.multiengine import IMultiEngine |
|
|||
25 | from IPython.kernel.tests.multienginetest import IFullSynchronousMultiEngineTestCase |
|
|||
26 | from IPython.kernel.multienginefc import IFCSynchronousMultiEngine |
|
|||
27 | from IPython.kernel import multiengine as me |
|
|||
28 | from IPython.kernel.clientconnector import ClientConnector |
|
|||
29 | from IPython.kernel.parallelfunction import ParallelFunction |
|
|||
30 | from IPython.kernel.error import CompositeError |
|
|||
31 | from IPython.kernel.util import printer |
|
|||
32 | except ImportError: |
|
|||
33 | import nose |
|
|||
34 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
|||
35 |
|
||||
36 | def _raise_it(f): |
|
36 | def _raise_it(f): | |
37 | try: |
|
37 | try: | |
38 | f.raiseException() |
|
38 | f.raiseException() | |
39 | except CompositeError, e: |
|
39 | except CompositeError, e: | |
40 | e.raise_exception() |
|
40 | e.raise_exception() | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase): |
|
43 | class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase): | |
44 |
|
44 | |||
45 | def setUp(self): |
|
45 | def setUp(self): | |
46 |
|
46 | |||
47 | self.engines = [] |
|
47 | self.engines = [] | |
48 |
|
48 | |||
49 | self.controller = ControllerService() |
|
49 | self.controller = ControllerService() | |
50 | self.controller.startService() |
|
50 | self.controller.startService() | |
51 | self.imultiengine = IMultiEngine(self.controller) |
|
51 | self.imultiengine = IMultiEngine(self.controller) | |
52 | self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine) |
|
52 | self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine) | |
53 |
|
53 | |||
54 | self.controller_tub = Tub() |
|
54 | self.controller_tub = Tub() | |
55 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') |
|
55 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') | |
56 | self.controller_tub.setLocation('127.0.0.1:10105') |
|
56 | self.controller_tub.setLocation('127.0.0.1:10105') | |
57 |
|
57 | |||
58 | furl = self.controller_tub.registerReference(self.mec_referenceable) |
|
58 | furl = self.controller_tub.registerReference(self.mec_referenceable) | |
59 | self.controller_tub.startService() |
|
59 | self.controller_tub.startService() | |
60 |
|
60 | |||
61 | self.client_tub = ClientConnector() |
|
61 | self.client_tub = ClientConnector() | |
62 | d = self.client_tub.get_multiengine_client(furl) |
|
62 | d = self.client_tub.get_multiengine_client(furl) | |
63 | d.addCallback(self.handle_got_client) |
|
63 | d.addCallback(self.handle_got_client) | |
64 | return d |
|
64 | return d | |
65 |
|
65 | |||
66 | def handle_got_client(self, client): |
|
66 | def handle_got_client(self, client): | |
67 | self.multiengine = client |
|
67 | self.multiengine = client | |
68 |
|
68 | |||
69 | def tearDown(self): |
|
69 | def tearDown(self): | |
70 | dlist = [] |
|
70 | dlist = [] | |
71 | # Shut down the multiengine client |
|
71 | # Shut down the multiengine client | |
72 | d = self.client_tub.tub.stopService() |
|
72 | d = self.client_tub.tub.stopService() | |
73 | dlist.append(d) |
|
73 | dlist.append(d) | |
74 | # Shut down the engines |
|
74 | # Shut down the engines | |
75 | for e in self.engines: |
|
75 | for e in self.engines: | |
76 | e.stopService() |
|
76 | e.stopService() | |
77 | # Shut down the controller |
|
77 | # Shut down the controller | |
78 | d = self.controller_tub.stopService() |
|
78 | d = self.controller_tub.stopService() | |
79 | d.addBoth(lambda _: self.controller.stopService()) |
|
79 | d.addBoth(lambda _: self.controller.stopService()) | |
80 | dlist.append(d) |
|
80 | dlist.append(d) | |
81 | return defer.DeferredList(dlist) |
|
81 | return defer.DeferredList(dlist) | |
82 |
|
82 | |||
83 | def test_mapper(self): |
|
83 | def test_mapper(self): | |
84 | self.addEngine(4) |
|
84 | self.addEngine(4) | |
85 | m = self.multiengine.mapper() |
|
85 | m = self.multiengine.mapper() | |
86 | self.assertEquals(m.multiengine,self.multiengine) |
|
86 | self.assertEquals(m.multiengine,self.multiengine) | |
87 | self.assertEquals(m.dist,'b') |
|
87 | self.assertEquals(m.dist,'b') | |
88 | self.assertEquals(m.targets,'all') |
|
88 | self.assertEquals(m.targets,'all') | |
89 | self.assertEquals(m.block,True) |
|
89 | self.assertEquals(m.block,True) | |
90 |
|
90 | |||
91 | def test_map_default(self): |
|
91 | def test_map_default(self): | |
92 | self.addEngine(4) |
|
92 | self.addEngine(4) | |
93 | m = self.multiengine.mapper() |
|
93 | m = self.multiengine.mapper() | |
94 | d = m.map(lambda x: 2*x, range(10)) |
|
94 | d = m.map(lambda x: 2*x, range(10)) | |
95 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
95 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
96 | d.addCallback(lambda _: self.multiengine.map(lambda x: 2*x, range(10))) |
|
96 | d.addCallback(lambda _: self.multiengine.map(lambda x: 2*x, range(10))) | |
97 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
97 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
98 | return d |
|
98 | return d | |
99 |
|
99 | |||
100 | def test_map_noblock(self): |
|
100 | def test_map_noblock(self): | |
101 | self.addEngine(4) |
|
101 | self.addEngine(4) | |
102 | m = self.multiengine.mapper(block=False) |
|
102 | m = self.multiengine.mapper(block=False) | |
103 | d = m.map(lambda x: 2*x, range(10)) |
|
103 | d = m.map(lambda x: 2*x, range(10)) | |
104 | d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True)) |
|
104 | d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True)) | |
105 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
105 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
106 | return d |
|
106 | return d | |
107 |
|
107 | |||
108 | def test_mapper_fail(self): |
|
108 | def test_mapper_fail(self): | |
109 | self.addEngine(4) |
|
109 | self.addEngine(4) | |
110 | m = self.multiengine.mapper() |
|
110 | m = self.multiengine.mapper() | |
111 | d = m.map(lambda x: 1/0, range(10)) |
|
111 | d = m.map(lambda x: 1/0, range(10)) | |
112 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) |
|
112 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) | |
113 | return d |
|
113 | return d | |
114 |
|
114 | |||
115 | def test_parallel(self): |
|
115 | def test_parallel(self): | |
116 | self.addEngine(4) |
|
116 | self.addEngine(4) | |
117 | p = self.multiengine.parallel() |
|
117 | p = self.multiengine.parallel() | |
118 | self.assert_(isinstance(p, ParallelFunction)) |
|
118 | self.assert_(isinstance(p, ParallelFunction)) | |
119 | @p |
|
119 | @p | |
120 | def f(x): return 2*x |
|
120 | def f(x): return 2*x | |
121 | d = f(range(10)) |
|
121 | d = f(range(10)) | |
122 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
122 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
123 | return d |
|
123 | return d | |
124 |
|
124 | |||
125 | def test_parallel_noblock(self): |
|
125 | def test_parallel_noblock(self): | |
126 | self.addEngine(1) |
|
126 | self.addEngine(1) | |
127 | p = self.multiengine.parallel(block=False) |
|
127 | p = self.multiengine.parallel(block=False) | |
128 | self.assert_(isinstance(p, ParallelFunction)) |
|
128 | self.assert_(isinstance(p, ParallelFunction)) | |
129 | @p |
|
129 | @p | |
130 | def f(x): return 2*x |
|
130 | def f(x): return 2*x | |
131 | d = f(range(10)) |
|
131 | d = f(range(10)) | |
132 | d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True)) |
|
132 | d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True)) | |
133 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
133 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
134 | return d |
|
134 | return d | |
135 |
|
135 | |||
136 | def test_parallel_fail(self): |
|
136 | def test_parallel_fail(self): | |
137 | self.addEngine(4) |
|
137 | self.addEngine(4) | |
138 | p = self.multiengine.parallel() |
|
138 | p = self.multiengine.parallel() | |
139 | self.assert_(isinstance(p, ParallelFunction)) |
|
139 | self.assert_(isinstance(p, ParallelFunction)) | |
140 | @p |
|
140 | @p | |
141 | def f(x): return 1/0 |
|
141 | def f(x): return 1/0 | |
142 | d = f(range(10)) |
|
142 | d = f(range(10)) | |
143 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) |
|
143 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) | |
144 | return d No newline at end of file |
|
144 | return d |
@@ -1,102 +1,102 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """This file contains unittests for the shell.py module.""" |
|
3 | """This file contains unittests for the shell.py module.""" | |
4 |
|
4 | |||
5 | __docformat__ = "restructuredtext en" |
|
5 | __docformat__ = "restructuredtext en" | |
6 |
|
6 | |||
7 | #----------------------------------------------------------------------------- |
|
7 | #----------------------------------------------------------------------------- | |
8 | # Copyright (C) 2008 The IPython Development Team |
|
8 | # Copyright (C) 2008 The IPython Development Team | |
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
11 | # the file COPYING, distributed as part of this software. | |
12 | #----------------------------------------------------------------------------- |
|
12 | #----------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
17 |
|
17 | |||
18 | try: |
|
18 | # Tell nose to skip this module | |
19 | import zope.interface as zi |
|
19 | __test__ = {} | |
20 | from twisted.trial import unittest |
|
20 | ||
21 | from IPython.testing.util import DeferredTestCase |
|
21 | import zope.interface as zi | |
|
22 | from twisted.trial import unittest | |||
|
23 | from IPython.testing.util import DeferredTestCase | |||
|
24 | ||||
|
25 | from IPython.kernel.newserialized import \ | |||
|
26 | ISerialized, \ | |||
|
27 | IUnSerialized, \ | |||
|
28 | Serialized, \ | |||
|
29 | UnSerialized, \ | |||
|
30 | SerializeIt, \ | |||
|
31 | UnSerializeIt | |||
22 |
|
32 | |||
23 | from IPython.kernel.newserialized import \ |
|
|||
24 | ISerialized, \ |
|
|||
25 | IUnSerialized, \ |
|
|||
26 | Serialized, \ |
|
|||
27 | UnSerialized, \ |
|
|||
28 | SerializeIt, \ |
|
|||
29 | UnSerializeIt |
|
|||
30 | except ImportError: |
|
|||
31 | import nose |
|
|||
32 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
|||
33 |
|
33 | |||
34 | #----------------------------------------------------------------------------- |
|
34 | #----------------------------------------------------------------------------- | |
35 | # Tests |
|
35 | # Tests | |
36 | #----------------------------------------------------------------------------- |
|
36 | #----------------------------------------------------------------------------- | |
37 |
|
37 | |||
38 | class SerializedTestCase(unittest.TestCase): |
|
38 | class SerializedTestCase(unittest.TestCase): | |
39 |
|
39 | |||
40 | def setUp(self): |
|
40 | def setUp(self): | |
41 | pass |
|
41 | pass | |
42 |
|
42 | |||
43 | def tearDown(self): |
|
43 | def tearDown(self): | |
44 | pass |
|
44 | pass | |
45 |
|
45 | |||
46 | def testSerializedInterfaces(self): |
|
46 | def testSerializedInterfaces(self): | |
47 |
|
47 | |||
48 | us = UnSerialized({'a':10, 'b':range(10)}) |
|
48 | us = UnSerialized({'a':10, 'b':range(10)}) | |
49 | s = ISerialized(us) |
|
49 | s = ISerialized(us) | |
50 | uss = IUnSerialized(s) |
|
50 | uss = IUnSerialized(s) | |
51 | self.assert_(ISerialized.providedBy(s)) |
|
51 | self.assert_(ISerialized.providedBy(s)) | |
52 | self.assert_(IUnSerialized.providedBy(us)) |
|
52 | self.assert_(IUnSerialized.providedBy(us)) | |
53 | self.assert_(IUnSerialized.providedBy(uss)) |
|
53 | self.assert_(IUnSerialized.providedBy(uss)) | |
54 | for m in list(ISerialized): |
|
54 | for m in list(ISerialized): | |
55 | self.assert_(hasattr(s, m)) |
|
55 | self.assert_(hasattr(s, m)) | |
56 | for m in list(IUnSerialized): |
|
56 | for m in list(IUnSerialized): | |
57 | self.assert_(hasattr(us, m)) |
|
57 | self.assert_(hasattr(us, m)) | |
58 | for m in list(IUnSerialized): |
|
58 | for m in list(IUnSerialized): | |
59 | self.assert_(hasattr(uss, m)) |
|
59 | self.assert_(hasattr(uss, m)) | |
60 |
|
60 | |||
61 | def testPickleSerialized(self): |
|
61 | def testPickleSerialized(self): | |
62 | obj = {'a':1.45345, 'b':'asdfsdf', 'c':10000L} |
|
62 | obj = {'a':1.45345, 'b':'asdfsdf', 'c':10000L} | |
63 | original = UnSerialized(obj) |
|
63 | original = UnSerialized(obj) | |
64 | originalSer = ISerialized(original) |
|
64 | originalSer = ISerialized(original) | |
65 | firstData = originalSer.getData() |
|
65 | firstData = originalSer.getData() | |
66 | firstTD = originalSer.getTypeDescriptor() |
|
66 | firstTD = originalSer.getTypeDescriptor() | |
67 | firstMD = originalSer.getMetadata() |
|
67 | firstMD = originalSer.getMetadata() | |
68 | self.assert_(firstTD == 'pickle') |
|
68 | self.assert_(firstTD == 'pickle') | |
69 | self.assert_(firstMD == {}) |
|
69 | self.assert_(firstMD == {}) | |
70 | unSerialized = IUnSerialized(originalSer) |
|
70 | unSerialized = IUnSerialized(originalSer) | |
71 | secondObj = unSerialized.getObject() |
|
71 | secondObj = unSerialized.getObject() | |
72 | for k, v in secondObj.iteritems(): |
|
72 | for k, v in secondObj.iteritems(): | |
73 | self.assert_(obj[k] == v) |
|
73 | self.assert_(obj[k] == v) | |
74 | secondSer = ISerialized(UnSerialized(secondObj)) |
|
74 | secondSer = ISerialized(UnSerialized(secondObj)) | |
75 | self.assert_(firstData == secondSer.getData()) |
|
75 | self.assert_(firstData == secondSer.getData()) | |
76 | self.assert_(firstTD == secondSer.getTypeDescriptor() ) |
|
76 | self.assert_(firstTD == secondSer.getTypeDescriptor() ) | |
77 | self.assert_(firstMD == secondSer.getMetadata()) |
|
77 | self.assert_(firstMD == secondSer.getMetadata()) | |
78 |
|
78 | |||
79 | def testNDArraySerialized(self): |
|
79 | def testNDArraySerialized(self): | |
80 | try: |
|
80 | try: | |
81 | import numpy |
|
81 | import numpy | |
82 | except ImportError: |
|
82 | except ImportError: | |
83 | pass |
|
83 | pass | |
84 | else: |
|
84 | else: | |
85 | a = numpy.linspace(0.0, 1.0, 1000) |
|
85 | a = numpy.linspace(0.0, 1.0, 1000) | |
86 | unSer1 = UnSerialized(a) |
|
86 | unSer1 = UnSerialized(a) | |
87 | ser1 = ISerialized(unSer1) |
|
87 | ser1 = ISerialized(unSer1) | |
88 | td = ser1.getTypeDescriptor() |
|
88 | td = ser1.getTypeDescriptor() | |
89 | self.assert_(td == 'ndarray') |
|
89 | self.assert_(td == 'ndarray') | |
90 | md = ser1.getMetadata() |
|
90 | md = ser1.getMetadata() | |
91 | self.assert_(md['shape'] == a.shape) |
|
91 | self.assert_(md['shape'] == a.shape) | |
92 | self.assert_(md['dtype'] == a.dtype.str) |
|
92 | self.assert_(md['dtype'] == a.dtype.str) | |
93 | buff = ser1.getData() |
|
93 | buff = ser1.getData() | |
94 | self.assert_(buff == numpy.getbuffer(a)) |
|
94 | self.assert_(buff == numpy.getbuffer(a)) | |
95 | s = Serialized(buff, td, md) |
|
95 | s = Serialized(buff, td, md) | |
96 | us = IUnSerialized(s) |
|
96 | us = IUnSerialized(s) | |
97 | final = us.getObject() |
|
97 | final = us.getObject() | |
98 | self.assert_(numpy.getbuffer(a) == numpy.getbuffer(final)) |
|
98 | self.assert_(numpy.getbuffer(a) == numpy.getbuffer(final)) | |
99 | self.assert_(a.dtype.str == final.dtype.str) |
|
99 | self.assert_(a.dtype.str == final.dtype.str) | |
100 | self.assert_(a.shape == final.shape) |
|
100 | self.assert_(a.shape == final.shape) | |
101 |
|
101 | |||
102 |
|
102 |
@@ -1,186 +1,186 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | """Tests for pendingdeferred.py""" |
|
4 | """Tests for pendingdeferred.py""" | |
5 |
|
5 | |||
6 | __docformat__ = "restructuredtext en" |
|
6 | __docformat__ = "restructuredtext en" | |
7 |
|
7 | |||
8 | #------------------------------------------------------------------------------- |
|
8 | #------------------------------------------------------------------------------- | |
9 | # Copyright (C) 2008 The IPython Development Team |
|
9 | # Copyright (C) 2008 The IPython Development Team | |
10 | # |
|
10 | # | |
11 | # Distributed under the terms of the BSD License. The full license is in |
|
11 | # Distributed under the terms of the BSD License. The full license is in | |
12 | # the file COPYING, distributed as part of this software. |
|
12 | # the file COPYING, distributed as part of this software. | |
13 | #------------------------------------------------------------------------------- |
|
13 | #------------------------------------------------------------------------------- | |
14 |
|
14 | |||
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 | # Imports |
|
16 | # Imports | |
17 | #------------------------------------------------------------------------------- |
|
17 | #------------------------------------------------------------------------------- | |
18 |
|
18 | |||
19 | try: |
|
19 | # Tell nose to skip this module | |
20 | from twisted.internet import defer |
|
20 | __test__ = {} | |
21 | from twisted.python import failure |
|
21 | ||
22 |
|
22 | from twisted.internet import defer | ||
23 | from IPython.testing.util import DeferredTestCase |
|
23 | from twisted.python import failure | |
24 | import IPython.kernel.pendingdeferred as pd |
|
24 | ||
25 |
|
|
25 | from IPython.testing.util import DeferredTestCase | |
26 | from IPython.kernel.util import printer |
|
26 | import IPython.kernel.pendingdeferred as pd | |
27 | except ImportError: |
|
27 | from IPython.kernel import error | |
28 | import nose |
|
28 | from IPython.kernel.util import printer | |
29 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
29 | ||
30 |
|
30 | |||
31 | class Foo(object): |
|
31 | class Foo(object): | |
32 |
|
32 | |||
33 | def bar(self, bahz): |
|
33 | def bar(self, bahz): | |
34 | return defer.succeed('blahblah: %s' % bahz) |
|
34 | return defer.succeed('blahblah: %s' % bahz) | |
35 |
|
35 | |||
36 | class TwoPhaseFoo(pd.PendingDeferredManager): |
|
36 | class TwoPhaseFoo(pd.PendingDeferredManager): | |
37 |
|
37 | |||
38 | def __init__(self, foo): |
|
38 | def __init__(self, foo): | |
39 | self.foo = foo |
|
39 | self.foo = foo | |
40 | pd.PendingDeferredManager.__init__(self) |
|
40 | pd.PendingDeferredManager.__init__(self) | |
41 |
|
41 | |||
42 | @pd.two_phase |
|
42 | @pd.two_phase | |
43 | def bar(self, bahz): |
|
43 | def bar(self, bahz): | |
44 | return self.foo.bar(bahz) |
|
44 | return self.foo.bar(bahz) | |
45 |
|
45 | |||
46 | class PendingDeferredManagerTest(DeferredTestCase): |
|
46 | class PendingDeferredManagerTest(DeferredTestCase): | |
47 |
|
47 | |||
48 | def setUp(self): |
|
48 | def setUp(self): | |
49 | self.pdm = pd.PendingDeferredManager() |
|
49 | self.pdm = pd.PendingDeferredManager() | |
50 |
|
50 | |||
51 | def tearDown(self): |
|
51 | def tearDown(self): | |
52 | pass |
|
52 | pass | |
53 |
|
53 | |||
54 | def testBasic(self): |
|
54 | def testBasic(self): | |
55 | dDict = {} |
|
55 | dDict = {} | |
56 | # Create 10 deferreds and save them |
|
56 | # Create 10 deferreds and save them | |
57 | for i in range(10): |
|
57 | for i in range(10): | |
58 | d = defer.Deferred() |
|
58 | d = defer.Deferred() | |
59 | did = self.pdm.save_pending_deferred(d) |
|
59 | did = self.pdm.save_pending_deferred(d) | |
60 | dDict[did] = d |
|
60 | dDict[did] = d | |
61 | # Make sure they are being saved |
|
61 | # Make sure they are being saved | |
62 | for k in dDict.keys(): |
|
62 | for k in dDict.keys(): | |
63 | self.assert_(self.pdm.quick_has_id(k)) |
|
63 | self.assert_(self.pdm.quick_has_id(k)) | |
64 | # Get the pending deferred (block=True), then callback with 'foo' and compare |
|
64 | # Get the pending deferred (block=True), then callback with 'foo' and compare | |
65 | for did in dDict.keys()[0:5]: |
|
65 | for did in dDict.keys()[0:5]: | |
66 | d = self.pdm.get_pending_deferred(did,block=True) |
|
66 | d = self.pdm.get_pending_deferred(did,block=True) | |
67 | dDict[did].callback('foo') |
|
67 | dDict[did].callback('foo') | |
68 | d.addCallback(lambda r: self.assert_(r=='foo')) |
|
68 | d.addCallback(lambda r: self.assert_(r=='foo')) | |
69 | # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised |
|
69 | # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised | |
70 | for did in dDict.keys()[5:10]: |
|
70 | for did in dDict.keys()[5:10]: | |
71 | d = self.pdm.get_pending_deferred(did,block=False) |
|
71 | d = self.pdm.get_pending_deferred(did,block=False) | |
72 | d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) |
|
72 | d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) | |
73 | # Now callback the last 5, get them and compare. |
|
73 | # Now callback the last 5, get them and compare. | |
74 | for did in dDict.keys()[5:10]: |
|
74 | for did in dDict.keys()[5:10]: | |
75 | dDict[did].callback('foo') |
|
75 | dDict[did].callback('foo') | |
76 | d = self.pdm.get_pending_deferred(did,block=False) |
|
76 | d = self.pdm.get_pending_deferred(did,block=False) | |
77 | d.addCallback(lambda r: self.assert_(r=='foo')) |
|
77 | d.addCallback(lambda r: self.assert_(r=='foo')) | |
78 |
|
78 | |||
79 | def test_save_then_delete(self): |
|
79 | def test_save_then_delete(self): | |
80 | d = defer.Deferred() |
|
80 | d = defer.Deferred() | |
81 | did = self.pdm.save_pending_deferred(d) |
|
81 | did = self.pdm.save_pending_deferred(d) | |
82 | self.assert_(self.pdm.quick_has_id(did)) |
|
82 | self.assert_(self.pdm.quick_has_id(did)) | |
83 | self.pdm.delete_pending_deferred(did) |
|
83 | self.pdm.delete_pending_deferred(did) | |
84 | self.assert_(not self.pdm.quick_has_id(did)) |
|
84 | self.assert_(not self.pdm.quick_has_id(did)) | |
85 |
|
85 | |||
86 | def test_save_get_delete(self): |
|
86 | def test_save_get_delete(self): | |
87 | d = defer.Deferred() |
|
87 | d = defer.Deferred() | |
88 | did = self.pdm.save_pending_deferred(d) |
|
88 | did = self.pdm.save_pending_deferred(d) | |
89 | d2 = self.pdm.get_pending_deferred(did,True) |
|
89 | d2 = self.pdm.get_pending_deferred(did,True) | |
90 | d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException)) |
|
90 | d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException)) | |
91 | self.pdm.delete_pending_deferred(did) |
|
91 | self.pdm.delete_pending_deferred(did) | |
92 | return d2 |
|
92 | return d2 | |
93 |
|
93 | |||
94 | def test_double_get(self): |
|
94 | def test_double_get(self): | |
95 | d = defer.Deferred() |
|
95 | d = defer.Deferred() | |
96 | did = self.pdm.save_pending_deferred(d) |
|
96 | did = self.pdm.save_pending_deferred(d) | |
97 | d2 = self.pdm.get_pending_deferred(did,True) |
|
97 | d2 = self.pdm.get_pending_deferred(did,True) | |
98 | d3 = self.pdm.get_pending_deferred(did,True) |
|
98 | d3 = self.pdm.get_pending_deferred(did,True) | |
99 | d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException)) |
|
99 | d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException)) | |
100 |
|
100 | |||
101 | def test_get_after_callback(self): |
|
101 | def test_get_after_callback(self): | |
102 | d = defer.Deferred() |
|
102 | d = defer.Deferred() | |
103 | did = self.pdm.save_pending_deferred(d) |
|
103 | did = self.pdm.save_pending_deferred(d) | |
104 | d.callback('foo') |
|
104 | d.callback('foo') | |
105 | d2 = self.pdm.get_pending_deferred(did,True) |
|
105 | d2 = self.pdm.get_pending_deferred(did,True) | |
106 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) |
|
106 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) | |
107 | self.assert_(not self.pdm.quick_has_id(did)) |
|
107 | self.assert_(not self.pdm.quick_has_id(did)) | |
108 |
|
108 | |||
109 | def test_get_before_callback(self): |
|
109 | def test_get_before_callback(self): | |
110 | d = defer.Deferred() |
|
110 | d = defer.Deferred() | |
111 | did = self.pdm.save_pending_deferred(d) |
|
111 | did = self.pdm.save_pending_deferred(d) | |
112 | d2 = self.pdm.get_pending_deferred(did,True) |
|
112 | d2 = self.pdm.get_pending_deferred(did,True) | |
113 | d.callback('foo') |
|
113 | d.callback('foo') | |
114 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) |
|
114 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) | |
115 | self.assert_(not self.pdm.quick_has_id(did)) |
|
115 | self.assert_(not self.pdm.quick_has_id(did)) | |
116 | d = defer.Deferred() |
|
116 | d = defer.Deferred() | |
117 | did = self.pdm.save_pending_deferred(d) |
|
117 | did = self.pdm.save_pending_deferred(d) | |
118 | d2 = self.pdm.get_pending_deferred(did,True) |
|
118 | d2 = self.pdm.get_pending_deferred(did,True) | |
119 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) |
|
119 | d2.addCallback(lambda r: self.assertEquals(r,'foo')) | |
120 | d.callback('foo') |
|
120 | d.callback('foo') | |
121 | self.assert_(not self.pdm.quick_has_id(did)) |
|
121 | self.assert_(not self.pdm.quick_has_id(did)) | |
122 |
|
122 | |||
123 | def test_get_after_errback(self): |
|
123 | def test_get_after_errback(self): | |
124 | class MyError(Exception): |
|
124 | class MyError(Exception): | |
125 | pass |
|
125 | pass | |
126 | d = defer.Deferred() |
|
126 | d = defer.Deferred() | |
127 | did = self.pdm.save_pending_deferred(d) |
|
127 | did = self.pdm.save_pending_deferred(d) | |
128 | d.errback(failure.Failure(MyError('foo'))) |
|
128 | d.errback(failure.Failure(MyError('foo'))) | |
129 | d2 = self.pdm.get_pending_deferred(did,True) |
|
129 | d2 = self.pdm.get_pending_deferred(did,True) | |
130 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) |
|
130 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) | |
131 | self.assert_(not self.pdm.quick_has_id(did)) |
|
131 | self.assert_(not self.pdm.quick_has_id(did)) | |
132 |
|
132 | |||
133 | def test_get_before_errback(self): |
|
133 | def test_get_before_errback(self): | |
134 | class MyError(Exception): |
|
134 | class MyError(Exception): | |
135 | pass |
|
135 | pass | |
136 | d = defer.Deferred() |
|
136 | d = defer.Deferred() | |
137 | did = self.pdm.save_pending_deferred(d) |
|
137 | did = self.pdm.save_pending_deferred(d) | |
138 | d2 = self.pdm.get_pending_deferred(did,True) |
|
138 | d2 = self.pdm.get_pending_deferred(did,True) | |
139 | d.errback(failure.Failure(MyError('foo'))) |
|
139 | d.errback(failure.Failure(MyError('foo'))) | |
140 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) |
|
140 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) | |
141 | self.assert_(not self.pdm.quick_has_id(did)) |
|
141 | self.assert_(not self.pdm.quick_has_id(did)) | |
142 | d = defer.Deferred() |
|
142 | d = defer.Deferred() | |
143 | did = self.pdm.save_pending_deferred(d) |
|
143 | did = self.pdm.save_pending_deferred(d) | |
144 | d2 = self.pdm.get_pending_deferred(did,True) |
|
144 | d2 = self.pdm.get_pending_deferred(did,True) | |
145 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) |
|
145 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) | |
146 | d.errback(failure.Failure(MyError('foo'))) |
|
146 | d.errback(failure.Failure(MyError('foo'))) | |
147 | self.assert_(not self.pdm.quick_has_id(did)) |
|
147 | self.assert_(not self.pdm.quick_has_id(did)) | |
148 |
|
148 | |||
149 | def test_noresult_noblock(self): |
|
149 | def test_noresult_noblock(self): | |
150 | d = defer.Deferred() |
|
150 | d = defer.Deferred() | |
151 | did = self.pdm.save_pending_deferred(d) |
|
151 | did = self.pdm.save_pending_deferred(d) | |
152 | d2 = self.pdm.get_pending_deferred(did,False) |
|
152 | d2 = self.pdm.get_pending_deferred(did,False) | |
153 | d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) |
|
153 | d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) | |
154 |
|
154 | |||
155 | def test_with_callbacks(self): |
|
155 | def test_with_callbacks(self): | |
156 | d = defer.Deferred() |
|
156 | d = defer.Deferred() | |
157 | d.addCallback(lambda r: r+' foo') |
|
157 | d.addCallback(lambda r: r+' foo') | |
158 | d.addCallback(lambda r: r+' bar') |
|
158 | d.addCallback(lambda r: r+' bar') | |
159 | did = self.pdm.save_pending_deferred(d) |
|
159 | did = self.pdm.save_pending_deferred(d) | |
160 | d2 = self.pdm.get_pending_deferred(did,True) |
|
160 | d2 = self.pdm.get_pending_deferred(did,True) | |
161 | d.callback('bam') |
|
161 | d.callback('bam') | |
162 | d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar')) |
|
162 | d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar')) | |
163 |
|
163 | |||
164 | def test_with_errbacks(self): |
|
164 | def test_with_errbacks(self): | |
165 | class MyError(Exception): |
|
165 | class MyError(Exception): | |
166 | pass |
|
166 | pass | |
167 | d = defer.Deferred() |
|
167 | d = defer.Deferred() | |
168 | d.addCallback(lambda r: 'foo') |
|
168 | d.addCallback(lambda r: 'foo') | |
169 | d.addErrback(lambda f: 'caught error') |
|
169 | d.addErrback(lambda f: 'caught error') | |
170 | did = self.pdm.save_pending_deferred(d) |
|
170 | did = self.pdm.save_pending_deferred(d) | |
171 | d2 = self.pdm.get_pending_deferred(did,True) |
|
171 | d2 = self.pdm.get_pending_deferred(did,True) | |
172 | d.errback(failure.Failure(MyError('bam'))) |
|
172 | d.errback(failure.Failure(MyError('bam'))) | |
173 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) |
|
173 | d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException)) | |
174 |
|
174 | |||
175 | def test_nested_deferreds(self): |
|
175 | def test_nested_deferreds(self): | |
176 | d = defer.Deferred() |
|
176 | d = defer.Deferred() | |
177 | d2 = defer.Deferred() |
|
177 | d2 = defer.Deferred() | |
178 | d.addCallback(lambda r: d2) |
|
178 | d.addCallback(lambda r: d2) | |
179 | did = self.pdm.save_pending_deferred(d) |
|
179 | did = self.pdm.save_pending_deferred(d) | |
180 | d.callback('foo') |
|
180 | d.callback('foo') | |
181 | d3 = self.pdm.get_pending_deferred(did,False) |
|
181 | d3 = self.pdm.get_pending_deferred(did,False) | |
182 | d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) |
|
182 | d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException)) | |
183 | d2.callback('bar') |
|
183 | d2.callback('bar') | |
184 | d3 = self.pdm.get_pending_deferred(did,False) |
|
184 | d3 = self.pdm.get_pending_deferred(did,False) | |
185 | d3.addCallback(lambda r: self.assertEquals(r,'bar')) |
|
185 | d3.addCallback(lambda r: self.assertEquals(r,'bar')) | |
186 |
|
186 |
@@ -1,51 +1,51 b'' | |||||
1 | # encoding: utf-8 |
|
1 | # encoding: utf-8 | |
2 |
|
2 | |||
3 | """This file contains unittests for the kernel.task.py module.""" |
|
3 | """This file contains unittests for the kernel.task.py module.""" | |
4 |
|
4 | |||
5 | __docformat__ = "restructuredtext en" |
|
5 | __docformat__ = "restructuredtext en" | |
6 |
|
6 | |||
7 | #------------------------------------------------------------------------------- |
|
7 | #------------------------------------------------------------------------------- | |
8 | # Copyright (C) 2008 The IPython Development Team |
|
8 | # Copyright (C) 2008 The IPython Development Team | |
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
11 | # the file COPYING, distributed as part of this software. | |
12 | #------------------------------------------------------------------------------- |
|
12 | #------------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 | #------------------------------------------------------------------------------- |
|
14 | #------------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #------------------------------------------------------------------------------- |
|
16 | #------------------------------------------------------------------------------- | |
17 |
|
17 | |||
18 | try: |
|
18 | # Tell nose to skip this module | |
19 | import time |
|
19 | __test__ = {} | |
20 |
|
20 | |||
21 | from twisted.internet import defer |
|
21 | import time | |
22 | from twisted.trial import unittest |
|
22 | ||
23 |
|
23 | from twisted.internet import defer | ||
24 | from IPython.kernel import task, controllerservice as cs, engineservice as es |
|
24 | from twisted.trial import unittest | |
25 | from IPython.kernel.multiengine import IMultiEngine |
|
25 | ||
26 | from IPython.testing.util import DeferredTestCase |
|
26 | from IPython.kernel import task, controllerservice as cs, engineservice as es | |
27 | from IPython.kernel.tests.tasktest import ITaskControllerTestCase |
|
27 | from IPython.kernel.multiengine import IMultiEngine | |
28 | except ImportError: |
|
28 | from IPython.testing.util import DeferredTestCase | |
29 | import nose |
|
29 | from IPython.kernel.tests.tasktest import ITaskControllerTestCase | |
30 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
30 | ||
31 |
|
31 | |||
32 | #------------------------------------------------------------------------------- |
|
32 | #------------------------------------------------------------------------------- | |
33 | # Tests |
|
33 | # Tests | |
34 | #------------------------------------------------------------------------------- |
|
34 | #------------------------------------------------------------------------------- | |
35 |
|
35 | |||
36 | class BasicTaskControllerTestCase(DeferredTestCase, ITaskControllerTestCase): |
|
36 | class BasicTaskControllerTestCase(DeferredTestCase, ITaskControllerTestCase): | |
37 |
|
37 | |||
38 | def setUp(self): |
|
38 | def setUp(self): | |
39 | self.controller = cs.ControllerService() |
|
39 | self.controller = cs.ControllerService() | |
40 | self.controller.startService() |
|
40 | self.controller.startService() | |
41 | self.multiengine = IMultiEngine(self.controller) |
|
41 | self.multiengine = IMultiEngine(self.controller) | |
42 | self.tc = task.ITaskController(self.controller) |
|
42 | self.tc = task.ITaskController(self.controller) | |
43 | self.tc.failurePenalty = 0 |
|
43 | self.tc.failurePenalty = 0 | |
44 | self.engines=[] |
|
44 | self.engines=[] | |
45 |
|
45 | |||
46 | def tearDown(self): |
|
46 | def tearDown(self): | |
47 | self.controller.stopService() |
|
47 | self.controller.stopService() | |
48 | for e in self.engines: |
|
48 | for e in self.engines: | |
49 | e.stopService() |
|
49 | e.stopService() | |
50 |
|
50 | |||
51 |
|
51 |
@@ -1,162 +1,161 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | __docformat__ = "restructuredtext en" |
|
4 | __docformat__ = "restructuredtext en" | |
5 |
|
5 | |||
6 | #------------------------------------------------------------------------------- |
|
6 | #------------------------------------------------------------------------------- | |
7 | # Copyright (C) 2008 The IPython Development Team |
|
7 | # Copyright (C) 2008 The IPython Development Team | |
8 | # |
|
8 | # | |
9 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
10 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
11 | #------------------------------------------------------------------------------- |
|
11 | #------------------------------------------------------------------------------- | |
12 |
|
12 | |||
13 | #------------------------------------------------------------------------------- |
|
13 | #------------------------------------------------------------------------------- | |
14 | # Imports |
|
14 | # Imports | |
15 | #------------------------------------------------------------------------------- |
|
15 | #------------------------------------------------------------------------------- | |
16 |
|
16 | |||
17 | try: |
|
17 | # Tell nose to skip this module | |
18 | import time |
|
18 | __test__ = {} | |
19 |
|
19 | |||
20 | from twisted.internet import defer, reactor |
|
20 | import time | |
21 |
|
21 | |||
22 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub |
|
22 | from twisted.internet import defer, reactor | |
23 |
|
23 | |||
24 |
|
|
24 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub | |
25 | from IPython.kernel import controllerservice as cs |
|
25 | ||
26 | import IPython.kernel.multiengine as me |
|
26 | from IPython.kernel import task as taskmodule | |
27 | from IPython.testing.util import DeferredTestCase |
|
27 | from IPython.kernel import controllerservice as cs | |
28 | from IPython.kernel.multienginefc import IFCSynchronousMultiEngine |
|
28 | import IPython.kernel.multiengine as me | |
29 | from IPython.kernel.taskfc import IFCTaskController |
|
29 | from IPython.testing.util import DeferredTestCase | |
30 |
|
|
30 | from IPython.kernel.multienginefc import IFCSynchronousMultiEngine | |
31 |
|
|
31 | from IPython.kernel.taskfc import IFCTaskController | |
32 |
|
|
32 | from IPython.kernel.util import printer | |
33 |
|
|
33 | from IPython.kernel.tests.tasktest import ITaskControllerTestCase | |
34 |
|
|
34 | from IPython.kernel.clientconnector import ClientConnector | |
35 | except ImportError: |
|
35 | from IPython.kernel.error import CompositeError | |
36 | import nose |
|
36 | from IPython.kernel.parallelfunction import ParallelFunction | |
37 | raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap") |
|
|||
38 |
|
37 | |||
39 |
|
38 | |||
40 | #------------------------------------------------------------------------------- |
|
39 | #------------------------------------------------------------------------------- | |
41 | # Tests |
|
40 | # Tests | |
42 | #------------------------------------------------------------------------------- |
|
41 | #------------------------------------------------------------------------------- | |
43 |
|
42 | |||
44 | def _raise_it(f): |
|
43 | def _raise_it(f): | |
45 | try: |
|
44 | try: | |
46 | f.raiseException() |
|
45 | f.raiseException() | |
47 | except CompositeError, e: |
|
46 | except CompositeError, e: | |
48 | e.raise_exception() |
|
47 | e.raise_exception() | |
49 |
|
48 | |||
50 | class TaskTest(DeferredTestCase, ITaskControllerTestCase): |
|
49 | class TaskTest(DeferredTestCase, ITaskControllerTestCase): | |
51 |
|
50 | |||
52 | def setUp(self): |
|
51 | def setUp(self): | |
53 |
|
52 | |||
54 | self.engines = [] |
|
53 | self.engines = [] | |
55 |
|
54 | |||
56 | self.controller = cs.ControllerService() |
|
55 | self.controller = cs.ControllerService() | |
57 | self.controller.startService() |
|
56 | self.controller.startService() | |
58 | self.imultiengine = me.IMultiEngine(self.controller) |
|
57 | self.imultiengine = me.IMultiEngine(self.controller) | |
59 | self.itc = taskmodule.ITaskController(self.controller) |
|
58 | self.itc = taskmodule.ITaskController(self.controller) | |
60 | self.itc.failurePenalty = 0 |
|
59 | self.itc.failurePenalty = 0 | |
61 |
|
60 | |||
62 | self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine) |
|
61 | self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine) | |
63 | self.tc_referenceable = IFCTaskController(self.itc) |
|
62 | self.tc_referenceable = IFCTaskController(self.itc) | |
64 |
|
63 | |||
65 | self.controller_tub = Tub() |
|
64 | self.controller_tub = Tub() | |
66 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') |
|
65 | self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1') | |
67 | self.controller_tub.setLocation('127.0.0.1:10105') |
|
66 | self.controller_tub.setLocation('127.0.0.1:10105') | |
68 |
|
67 | |||
69 | mec_furl = self.controller_tub.registerReference(self.mec_referenceable) |
|
68 | mec_furl = self.controller_tub.registerReference(self.mec_referenceable) | |
70 | tc_furl = self.controller_tub.registerReference(self.tc_referenceable) |
|
69 | tc_furl = self.controller_tub.registerReference(self.tc_referenceable) | |
71 | self.controller_tub.startService() |
|
70 | self.controller_tub.startService() | |
72 |
|
71 | |||
73 | self.client_tub = ClientConnector() |
|
72 | self.client_tub = ClientConnector() | |
74 | d = self.client_tub.get_multiengine_client(mec_furl) |
|
73 | d = self.client_tub.get_multiengine_client(mec_furl) | |
75 | d.addCallback(self.handle_mec_client) |
|
74 | d.addCallback(self.handle_mec_client) | |
76 | d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl)) |
|
75 | d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl)) | |
77 | d.addCallback(self.handle_tc_client) |
|
76 | d.addCallback(self.handle_tc_client) | |
78 | return d |
|
77 | return d | |
79 |
|
78 | |||
80 | def handle_mec_client(self, client): |
|
79 | def handle_mec_client(self, client): | |
81 | self.multiengine = client |
|
80 | self.multiengine = client | |
82 |
|
81 | |||
83 | def handle_tc_client(self, client): |
|
82 | def handle_tc_client(self, client): | |
84 | self.tc = client |
|
83 | self.tc = client | |
85 |
|
84 | |||
86 | def tearDown(self): |
|
85 | def tearDown(self): | |
87 | dlist = [] |
|
86 | dlist = [] | |
88 | # Shut down the multiengine client |
|
87 | # Shut down the multiengine client | |
89 | d = self.client_tub.tub.stopService() |
|
88 | d = self.client_tub.tub.stopService() | |
90 | dlist.append(d) |
|
89 | dlist.append(d) | |
91 | # Shut down the engines |
|
90 | # Shut down the engines | |
92 | for e in self.engines: |
|
91 | for e in self.engines: | |
93 | e.stopService() |
|
92 | e.stopService() | |
94 | # Shut down the controller |
|
93 | # Shut down the controller | |
95 | d = self.controller_tub.stopService() |
|
94 | d = self.controller_tub.stopService() | |
96 | d.addBoth(lambda _: self.controller.stopService()) |
|
95 | d.addBoth(lambda _: self.controller.stopService()) | |
97 | dlist.append(d) |
|
96 | dlist.append(d) | |
98 | return defer.DeferredList(dlist) |
|
97 | return defer.DeferredList(dlist) | |
99 |
|
98 | |||
100 | def test_mapper(self): |
|
99 | def test_mapper(self): | |
101 | self.addEngine(1) |
|
100 | self.addEngine(1) | |
102 | m = self.tc.mapper() |
|
101 | m = self.tc.mapper() | |
103 | self.assertEquals(m.task_controller,self.tc) |
|
102 | self.assertEquals(m.task_controller,self.tc) | |
104 | self.assertEquals(m.clear_before,False) |
|
103 | self.assertEquals(m.clear_before,False) | |
105 | self.assertEquals(m.clear_after,False) |
|
104 | self.assertEquals(m.clear_after,False) | |
106 | self.assertEquals(m.retries,0) |
|
105 | self.assertEquals(m.retries,0) | |
107 | self.assertEquals(m.recovery_task,None) |
|
106 | self.assertEquals(m.recovery_task,None) | |
108 | self.assertEquals(m.depend,None) |
|
107 | self.assertEquals(m.depend,None) | |
109 | self.assertEquals(m.block,True) |
|
108 | self.assertEquals(m.block,True) | |
110 |
|
109 | |||
111 | def test_map_default(self): |
|
110 | def test_map_default(self): | |
112 | self.addEngine(1) |
|
111 | self.addEngine(1) | |
113 | m = self.tc.mapper() |
|
112 | m = self.tc.mapper() | |
114 | d = m.map(lambda x: 2*x, range(10)) |
|
113 | d = m.map(lambda x: 2*x, range(10)) | |
115 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
114 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
116 | d.addCallback(lambda _: self.tc.map(lambda x: 2*x, range(10))) |
|
115 | d.addCallback(lambda _: self.tc.map(lambda x: 2*x, range(10))) | |
117 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
116 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
118 | return d |
|
117 | return d | |
119 |
|
118 | |||
120 | def test_map_noblock(self): |
|
119 | def test_map_noblock(self): | |
121 | self.addEngine(1) |
|
120 | self.addEngine(1) | |
122 | m = self.tc.mapper(block=False) |
|
121 | m = self.tc.mapper(block=False) | |
123 | d = m.map(lambda x: 2*x, range(10)) |
|
122 | d = m.map(lambda x: 2*x, range(10)) | |
124 | d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)])) |
|
123 | d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)])) | |
125 | return d |
|
124 | return d | |
126 |
|
125 | |||
127 | def test_mapper_fail(self): |
|
126 | def test_mapper_fail(self): | |
128 | self.addEngine(1) |
|
127 | self.addEngine(1) | |
129 | m = self.tc.mapper() |
|
128 | m = self.tc.mapper() | |
130 | d = m.map(lambda x: 1/0, range(10)) |
|
129 | d = m.map(lambda x: 1/0, range(10)) | |
131 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) |
|
130 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) | |
132 | return d |
|
131 | return d | |
133 |
|
132 | |||
134 | def test_parallel(self): |
|
133 | def test_parallel(self): | |
135 | self.addEngine(1) |
|
134 | self.addEngine(1) | |
136 | p = self.tc.parallel() |
|
135 | p = self.tc.parallel() | |
137 | self.assert_(isinstance(p, ParallelFunction)) |
|
136 | self.assert_(isinstance(p, ParallelFunction)) | |
138 | @p |
|
137 | @p | |
139 | def f(x): return 2*x |
|
138 | def f(x): return 2*x | |
140 | d = f(range(10)) |
|
139 | d = f(range(10)) | |
141 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) |
|
140 | d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)])) | |
142 | return d |
|
141 | return d | |
143 |
|
142 | |||
144 | def test_parallel_noblock(self): |
|
143 | def test_parallel_noblock(self): | |
145 | self.addEngine(1) |
|
144 | self.addEngine(1) | |
146 | p = self.tc.parallel(block=False) |
|
145 | p = self.tc.parallel(block=False) | |
147 | self.assert_(isinstance(p, ParallelFunction)) |
|
146 | self.assert_(isinstance(p, ParallelFunction)) | |
148 | @p |
|
147 | @p | |
149 | def f(x): return 2*x |
|
148 | def f(x): return 2*x | |
150 | d = f(range(10)) |
|
149 | d = f(range(10)) | |
151 | d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)])) |
|
150 | d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)])) | |
152 | return d |
|
151 | return d | |
153 |
|
152 | |||
154 | def test_parallel_fail(self): |
|
153 | def test_parallel_fail(self): | |
155 | self.addEngine(1) |
|
154 | self.addEngine(1) | |
156 | p = self.tc.parallel() |
|
155 | p = self.tc.parallel() | |
157 | self.assert_(isinstance(p, ParallelFunction)) |
|
156 | self.assert_(isinstance(p, ParallelFunction)) | |
158 | @p |
|
157 | @p | |
159 | def f(x): return 1/0 |
|
158 | def f(x): return 1/0 | |
160 | d = f(range(10)) |
|
159 | d = f(range(10)) | |
161 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) |
|
160 | d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f)) | |
162 | return d No newline at end of file |
|
161 | return d |
@@ -1,48 +1,51 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | #----------------------------------------------------------------------------- |
|
4 | #----------------------------------------------------------------------------- | |
5 | # Copyright (C) 2008 The IPython Development Team |
|
5 | # Copyright (C) 2008 The IPython Development Team | |
6 | # |
|
6 | # | |
7 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | # Distributed under the terms of the BSD License. The full license is in | |
8 | # the file COPYING, distributed as part of this software. |
|
8 | # the file COPYING, distributed as part of this software. | |
9 | #----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
10 |
|
10 | |||
11 | #----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
12 | # Imports |
|
12 | # Imports | |
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 |
|
14 | |||
|
15 | # Tell nose to skip this module | |||
|
16 | __test__ = {} | |||
|
17 | ||||
15 | import tempfile |
|
18 | import tempfile | |
16 | import os, sys |
|
19 | import os, sys | |
17 |
|
20 | |||
18 | from twisted.internet import reactor |
|
21 | from twisted.internet import reactor | |
19 | from twisted.trial import unittest |
|
22 | from twisted.trial import unittest | |
20 |
|
23 | |||
21 | from IPython.kernel.error import FileTimeoutError |
|
24 | from IPython.kernel.error import FileTimeoutError | |
22 | from IPython.kernel.twistedutil import wait_for_file |
|
25 | from IPython.kernel.twistedutil import wait_for_file | |
23 |
|
26 | |||
24 | #----------------------------------------------------------------------------- |
|
27 | #----------------------------------------------------------------------------- | |
25 | # Tests |
|
28 | # Tests | |
26 | #----------------------------------------------------------------------------- |
|
29 | #----------------------------------------------------------------------------- | |
27 |
|
30 | |||
28 | class TestWaitForFile(unittest.TestCase): |
|
31 | class TestWaitForFile(unittest.TestCase): | |
29 |
|
32 | |||
30 | def test_delay(self): |
|
33 | def test_delay(self): | |
31 | filename = tempfile.mktemp() |
|
34 | filename = tempfile.mktemp() | |
32 | def _create_file(): |
|
35 | def _create_file(): | |
33 | open(filename,'w').write('####') |
|
36 | open(filename,'w').write('####') | |
34 | dcall = reactor.callLater(0.5, _create_file) |
|
37 | dcall = reactor.callLater(0.5, _create_file) | |
35 | d = wait_for_file(filename,delay=0.1) |
|
38 | d = wait_for_file(filename,delay=0.1) | |
36 | d.addCallback(lambda r: self.assert_(r)) |
|
39 | d.addCallback(lambda r: self.assert_(r)) | |
37 | def _cancel_dcall(r): |
|
40 | def _cancel_dcall(r): | |
38 | if dcall.active(): |
|
41 | if dcall.active(): | |
39 | dcall.cancel() |
|
42 | dcall.cancel() | |
40 | d.addCallback(_cancel_dcall) |
|
43 | d.addCallback(_cancel_dcall) | |
41 | return d |
|
44 | return d | |
42 |
|
45 | |||
43 | def test_timeout(self): |
|
46 | def test_timeout(self): | |
44 | filename = tempfile.mktemp() |
|
47 | filename = tempfile.mktemp() | |
45 | d = wait_for_file(filename,delay=0.1,max_tries=1) |
|
48 | d = wait_for_file(filename,delay=0.1,max_tries=1) | |
46 | d.addErrback(lambda f: self.assertRaises(FileTimeoutError,f.raiseException)) |
|
49 | d.addErrback(lambda f: self.assertRaises(FileTimeoutError,f.raiseException)) | |
47 | return d |
|
50 | return d | |
48 | No newline at end of file |
|
51 |
@@ -1,254 +1,254 b'' | |||||
1 | """Decorators for labeling test objects. |
|
1 | """Decorators for labeling test objects. | |
2 |
|
2 | |||
3 | Decorators that merely return a modified version of the original function |
|
3 | Decorators that merely return a modified version of the original function | |
4 | object are straightforward. Decorators that return a new function object need |
|
4 | object are straightforward. Decorators that return a new function object need | |
5 | to use nose.tools.make_decorator(original_function)(decorator) in returning the |
|
5 | to use nose.tools.make_decorator(original_function)(decorator) in returning the | |
6 | decorator, in order to preserve metadata such as function name, setup and |
|
6 | decorator, in order to preserve metadata such as function name, setup and | |
7 | teardown functions and so on - see nose.tools for more information. |
|
7 | teardown functions and so on - see nose.tools for more information. | |
8 |
|
8 | |||
9 | This module provides a set of useful decorators meant to be ready to use in |
|
9 | This module provides a set of useful decorators meant to be ready to use in | |
10 | your own tests. See the bottom of the file for the ready-made ones, and if you |
|
10 | your own tests. See the bottom of the file for the ready-made ones, and if you | |
11 | find yourself writing a new one that may be of generic use, add it here. |
|
11 | find yourself writing a new one that may be of generic use, add it here. | |
12 |
|
12 | |||
13 | NOTE: This file contains IPython-specific decorators and imports the |
|
13 | NOTE: This file contains IPython-specific decorators and imports the | |
14 | numpy.testing.decorators file, which we've copied verbatim. Any of our own |
|
14 | numpy.testing.decorators file, which we've copied verbatim. Any of our own | |
15 | code will be added at the bottom if we end up extending this. |
|
15 | code will be added at the bottom if we end up extending this. | |
16 | """ |
|
16 | """ | |
17 |
|
17 | |||
18 | # Stdlib imports |
|
18 | # Stdlib imports | |
19 | import inspect |
|
19 | import inspect | |
20 | import sys |
|
20 | import sys | |
21 |
|
21 | |||
22 | # Third-party imports |
|
22 | # Third-party imports | |
23 |
|
23 | |||
24 | # This is Michele Simionato's decorator module, also kept verbatim. |
|
24 | # This is Michele Simionato's decorator module, also kept verbatim. | |
25 | from decorator_msim import decorator, update_wrapper |
|
25 | from decorator_msim import decorator, update_wrapper | |
26 |
|
26 | |||
27 | # Grab the numpy-specific decorators which we keep in a file that we |
|
27 | # Grab the numpy-specific decorators which we keep in a file that we | |
28 | # occasionally update from upstream: decorators_numpy.py is an IDENTICAL copy |
|
28 | # occasionally update from upstream: decorators_numpy.py is an IDENTICAL copy | |
29 | # of numpy.testing.decorators. |
|
29 | # of numpy.testing.decorators. | |
30 | from decorators_numpy import * |
|
30 | from decorators_numpy import * | |
31 |
|
31 | |||
32 | ############################################################################## |
|
32 | ############################################################################## | |
33 | # Local code begins |
|
33 | # Local code begins | |
34 |
|
34 | |||
35 | # Utility functions |
|
35 | # Utility functions | |
36 |
|
36 | |||
37 | def apply_wrapper(wrapper,func): |
|
37 | def apply_wrapper(wrapper,func): | |
38 | """Apply a wrapper to a function for decoration. |
|
38 | """Apply a wrapper to a function for decoration. | |
39 |
|
39 | |||
40 | This mixes Michele Simionato's decorator tool with nose's make_decorator, |
|
40 | This mixes Michele Simionato's decorator tool with nose's make_decorator, | |
41 | to apply a wrapper in a decorator so that all nose attributes, as well as |
|
41 | to apply a wrapper in a decorator so that all nose attributes, as well as | |
42 | function signature and other properties, survive the decoration cleanly. |
|
42 | function signature and other properties, survive the decoration cleanly. | |
43 | This will ensure that wrapped functions can still be well introspected via |
|
43 | This will ensure that wrapped functions can still be well introspected via | |
44 | IPython, for example. |
|
44 | IPython, for example. | |
45 | """ |
|
45 | """ | |
46 | import nose.tools |
|
46 | import nose.tools | |
47 |
|
47 | |||
48 | return decorator(wrapper,nose.tools.make_decorator(func)(wrapper)) |
|
48 | return decorator(wrapper,nose.tools.make_decorator(func)(wrapper)) | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | def make_label_dec(label,ds=None): |
|
51 | def make_label_dec(label,ds=None): | |
52 | """Factory function to create a decorator that applies one or more labels. |
|
52 | """Factory function to create a decorator that applies one or more labels. | |
53 |
|
53 | |||
54 | :Parameters: |
|
54 | :Parameters: | |
55 | label : string or sequence |
|
55 | label : string or sequence | |
56 | One or more labels that will be applied by the decorator to the functions |
|
56 | One or more labels that will be applied by the decorator to the functions | |
57 | it decorates. Labels are attributes of the decorated function with their |
|
57 | it decorates. Labels are attributes of the decorated function with their | |
58 | value set to True. |
|
58 | value set to True. | |
59 |
|
59 | |||
60 | :Keywords: |
|
60 | :Keywords: | |
61 | ds : string |
|
61 | ds : string | |
62 | An optional docstring for the resulting decorator. If not given, a |
|
62 | An optional docstring for the resulting decorator. If not given, a | |
63 | default docstring is auto-generated. |
|
63 | default docstring is auto-generated. | |
64 |
|
64 | |||
65 | :Returns: |
|
65 | :Returns: | |
66 | A decorator. |
|
66 | A decorator. | |
67 |
|
67 | |||
68 | :Examples: |
|
68 | :Examples: | |
69 |
|
69 | |||
70 | A simple labeling decorator: |
|
70 | A simple labeling decorator: | |
71 | >>> slow = make_label_dec('slow') |
|
71 | >>> slow = make_label_dec('slow') | |
72 | >>> print slow.__doc__ |
|
72 | >>> print slow.__doc__ | |
73 | Labels a test as 'slow'. |
|
73 | Labels a test as 'slow'. | |
74 |
|
74 | |||
75 | And one that uses multiple labels and a custom docstring: |
|
75 | And one that uses multiple labels and a custom docstring: | |
76 | >>> rare = make_label_dec(['slow','hard'], |
|
76 | >>> rare = make_label_dec(['slow','hard'], | |
77 | ... "Mix labels 'slow' and 'hard' for rare tests.") |
|
77 | ... "Mix labels 'slow' and 'hard' for rare tests.") | |
78 | >>> print rare.__doc__ |
|
78 | >>> print rare.__doc__ | |
79 | Mix labels 'slow' and 'hard' for rare tests. |
|
79 | Mix labels 'slow' and 'hard' for rare tests. | |
80 |
|
80 | |||
81 | Now, let's test using this one: |
|
81 | Now, let's test using this one: | |
82 | >>> @rare |
|
82 | >>> @rare | |
83 | ... def f(): pass |
|
83 | ... def f(): pass | |
84 | ... |
|
84 | ... | |
85 | >>> |
|
85 | >>> | |
86 | >>> f.slow |
|
86 | >>> f.slow | |
87 | True |
|
87 | True | |
88 | >>> f.hard |
|
88 | >>> f.hard | |
89 | True |
|
89 | True | |
90 | """ |
|
90 | """ | |
91 |
|
91 | |||
92 | if isinstance(label,basestring): |
|
92 | if isinstance(label,basestring): | |
93 | labels = [label] |
|
93 | labels = [label] | |
94 | else: |
|
94 | else: | |
95 | labels = label |
|
95 | labels = label | |
96 |
|
96 | |||
97 | # Validate that the given label(s) are OK for use in setattr() by doing a |
|
97 | # Validate that the given label(s) are OK for use in setattr() by doing a | |
98 | # dry run on a dummy function. |
|
98 | # dry run on a dummy function. | |
99 | tmp = lambda : None |
|
99 | tmp = lambda : None | |
100 | for label in labels: |
|
100 | for label in labels: | |
101 | setattr(tmp,label,True) |
|
101 | setattr(tmp,label,True) | |
102 |
|
102 | |||
103 | # This is the actual decorator we'll return |
|
103 | # This is the actual decorator we'll return | |
104 | def decor(f): |
|
104 | def decor(f): | |
105 | for label in labels: |
|
105 | for label in labels: | |
106 | setattr(f,label,True) |
|
106 | setattr(f,label,True) | |
107 | return f |
|
107 | return f | |
108 |
|
108 | |||
109 | # Apply the user's docstring, or autogenerate a basic one |
|
109 | # Apply the user's docstring, or autogenerate a basic one | |
110 | if ds is None: |
|
110 | if ds is None: | |
111 | ds = "Labels a test as %r." % label |
|
111 | ds = "Labels a test as %r." % label | |
112 | decor.__doc__ = ds |
|
112 | decor.__doc__ = ds | |
113 |
|
113 | |||
114 | return decor |
|
114 | return decor | |
115 |
|
115 | |||
116 |
|
116 | |||
117 | # Inspired by numpy's skipif, but uses the full apply_wrapper utility to |
|
117 | # Inspired by numpy's skipif, but uses the full apply_wrapper utility to | |
118 | # preserve function metadata better and allows the skip condition to be a |
|
118 | # preserve function metadata better and allows the skip condition to be a | |
119 | # callable. |
|
119 | # callable. | |
120 | def skipif(skip_condition, msg=None): |
|
120 | def skipif(skip_condition, msg=None): | |
121 | ''' Make function raise SkipTest exception if skip_condition is true |
|
121 | ''' Make function raise SkipTest exception if skip_condition is true | |
122 |
|
122 | |||
123 | Parameters |
|
123 | Parameters | |
124 | ---------- |
|
124 | ---------- | |
125 | skip_condition : bool or callable. |
|
125 | skip_condition : bool or callable. | |
126 |
|
|
126 | Flag to determine whether to skip test. If the condition is a | |
127 |
|
|
127 | callable, it is used at runtime to dynamically make the decision. This | |
128 |
|
|
128 | is useful for tests that may require costly imports, to delay the cost | |
129 |
|
|
129 | until the test suite is actually executed. | |
130 | msg : string |
|
130 | msg : string | |
131 | Message to give on raising a SkipTest exception |
|
131 | Message to give on raising a SkipTest exception | |
132 |
|
132 | |||
133 | Returns |
|
133 | Returns | |
134 | ------- |
|
134 | ------- | |
135 | decorator : function |
|
135 | decorator : function | |
136 | Decorator, which, when applied to a function, causes SkipTest |
|
136 | Decorator, which, when applied to a function, causes SkipTest | |
137 | to be raised when the skip_condition was True, and the function |
|
137 | to be raised when the skip_condition was True, and the function | |
138 | to be called normally otherwise. |
|
138 | to be called normally otherwise. | |
139 |
|
139 | |||
140 | Notes |
|
140 | Notes | |
141 | ----- |
|
141 | ----- | |
142 | You will see from the code that we had to further decorate the |
|
142 | You will see from the code that we had to further decorate the | |
143 | decorator with the nose.tools.make_decorator function in order to |
|
143 | decorator with the nose.tools.make_decorator function in order to | |
144 | transmit the function name and various other metadata. |
|
144 | transmit the function name and various other metadata. | |
145 | ''' |
|
145 | ''' | |
146 |
|
146 | |||
147 | def skip_decorator(f): |
|
147 | def skip_decorator(f): | |
148 | # Local import to avoid a hard nose dependency and only incur the |
|
148 | # Local import to avoid a hard nose dependency and only incur the | |
149 | # import time overhead at actual test-time. |
|
149 | # import time overhead at actual test-time. | |
150 | import nose |
|
150 | import nose | |
151 |
|
151 | |||
152 | # Allow for both boolean or callable skip conditions. |
|
152 | # Allow for both boolean or callable skip conditions. | |
153 | if callable(skip_condition): |
|
153 | if callable(skip_condition): | |
154 | skip_val = lambda : skip_condition() |
|
154 | skip_val = lambda : skip_condition() | |
155 | else: |
|
155 | else: | |
156 | skip_val = lambda : skip_condition |
|
156 | skip_val = lambda : skip_condition | |
157 |
|
157 | |||
158 | def get_msg(func,msg=None): |
|
158 | def get_msg(func,msg=None): | |
159 | """Skip message with information about function being skipped.""" |
|
159 | """Skip message with information about function being skipped.""" | |
160 | if msg is None: out = 'Test skipped due to test condition.' |
|
160 | if msg is None: out = 'Test skipped due to test condition.' | |
161 | else: out = msg |
|
161 | else: out = msg | |
162 | return "Skipping test: %s. %s" % (func.__name__,out) |
|
162 | return "Skipping test: %s. %s" % (func.__name__,out) | |
163 |
|
163 | |||
164 | # We need to define *two* skippers because Python doesn't allow both |
|
164 | # We need to define *two* skippers because Python doesn't allow both | |
165 | # return with value and yield inside the same function. |
|
165 | # return with value and yield inside the same function. | |
166 | def skipper_func(*args, **kwargs): |
|
166 | def skipper_func(*args, **kwargs): | |
167 | """Skipper for normal test functions.""" |
|
167 | """Skipper for normal test functions.""" | |
168 | if skip_val(): |
|
168 | if skip_val(): | |
169 | raise nose.SkipTest(get_msg(f,msg)) |
|
169 | raise nose.SkipTest(get_msg(f,msg)) | |
170 | else: |
|
170 | else: | |
171 | return f(*args, **kwargs) |
|
171 | return f(*args, **kwargs) | |
172 |
|
172 | |||
173 | def skipper_gen(*args, **kwargs): |
|
173 | def skipper_gen(*args, **kwargs): | |
174 | """Skipper for test generators.""" |
|
174 | """Skipper for test generators.""" | |
175 | if skip_val(): |
|
175 | if skip_val(): | |
176 | raise nose.SkipTest(get_msg(f,msg)) |
|
176 | raise nose.SkipTest(get_msg(f,msg)) | |
177 | else: |
|
177 | else: | |
178 | for x in f(*args, **kwargs): |
|
178 | for x in f(*args, **kwargs): | |
179 | yield x |
|
179 | yield x | |
180 |
|
180 | |||
181 | # Choose the right skipper to use when building the actual generator. |
|
181 | # Choose the right skipper to use when building the actual generator. | |
182 | if nose.util.isgenerator(f): |
|
182 | if nose.util.isgenerator(f): | |
183 | skipper = skipper_gen |
|
183 | skipper = skipper_gen | |
184 | else: |
|
184 | else: | |
185 | skipper = skipper_func |
|
185 | skipper = skipper_func | |
186 |
|
186 | |||
187 | return nose.tools.make_decorator(f)(skipper) |
|
187 | return nose.tools.make_decorator(f)(skipper) | |
188 |
|
188 | |||
189 | return skip_decorator |
|
189 | return skip_decorator | |
190 |
|
190 | |||
191 | # A version with the condition set to true, common case just to attach a message |
|
191 | # A version with the condition set to true, common case just to attach a message | |
192 | # to a skip decorator |
|
192 | # to a skip decorator | |
193 | def skip(msg=None): |
|
193 | def skip(msg=None): | |
194 | """Decorator factory - mark a test function for skipping from test suite. |
|
194 | """Decorator factory - mark a test function for skipping from test suite. | |
195 |
|
195 | |||
196 | :Parameters: |
|
196 | :Parameters: | |
197 | msg : string |
|
197 | msg : string | |
198 | Optional message to be added. |
|
198 | Optional message to be added. | |
199 |
|
199 | |||
200 | :Returns: |
|
200 | :Returns: | |
201 | decorator : function |
|
201 | decorator : function | |
202 | Decorator, which, when applied to a function, causes SkipTest |
|
202 | Decorator, which, when applied to a function, causes SkipTest | |
203 | to be raised, with the optional message added. |
|
203 | to be raised, with the optional message added. | |
204 | """ |
|
204 | """ | |
205 |
|
205 | |||
206 | return skipif(True,msg) |
|
206 | return skipif(True,msg) | |
207 |
|
207 | |||
208 |
|
208 | |||
209 | #----------------------------------------------------------------------------- |
|
209 | #----------------------------------------------------------------------------- | |
210 | # Utility functions for decorators |
|
210 | # Utility functions for decorators | |
211 | def numpy_not_available(): |
|
211 | def numpy_not_available(): | |
212 | """Can numpy be imported? Returns true if numpy does NOT import. |
|
212 | """Can numpy be imported? Returns true if numpy does NOT import. | |
213 |
|
213 | |||
214 | This is used to make a decorator to skip tests that require numpy to be |
|
214 | This is used to make a decorator to skip tests that require numpy to be | |
215 | available, but delay the 'import numpy' to test execution time. |
|
215 | available, but delay the 'import numpy' to test execution time. | |
216 | """ |
|
216 | """ | |
217 | try: |
|
217 | try: | |
218 | import numpy |
|
218 | import numpy | |
219 | np_not_avail = False |
|
219 | np_not_avail = False | |
220 | except ImportError: |
|
220 | except ImportError: | |
221 | np_not_avail = True |
|
221 | np_not_avail = True | |
222 |
|
222 | |||
223 | return np_not_avail |
|
223 | return np_not_avail | |
224 |
|
224 | |||
225 | #----------------------------------------------------------------------------- |
|
225 | #----------------------------------------------------------------------------- | |
226 | # Decorators for public use |
|
226 | # Decorators for public use | |
227 |
|
227 | |||
228 | skip_doctest = make_label_dec('skip_doctest', |
|
228 | skip_doctest = make_label_dec('skip_doctest', | |
229 | """Decorator - mark a function or method for skipping its doctest. |
|
229 | """Decorator - mark a function or method for skipping its doctest. | |
230 |
|
230 | |||
231 | This decorator allows you to mark a function whose docstring you wish to |
|
231 | This decorator allows you to mark a function whose docstring you wish to | |
232 | omit from testing, while preserving the docstring for introspection, help, |
|
232 | omit from testing, while preserving the docstring for introspection, help, | |
233 | etc.""") |
|
233 | etc.""") | |
234 |
|
234 | |||
235 | # Decorators to skip certain tests on specific platforms. |
|
235 | # Decorators to skip certain tests on specific platforms. | |
236 | skip_win32 = skipif(sys.platform == 'win32', |
|
236 | skip_win32 = skipif(sys.platform == 'win32', | |
237 | "This test does not run under Windows") |
|
237 | "This test does not run under Windows") | |
238 | skip_linux = skipif(sys.platform == 'linux2', |
|
238 | skip_linux = skipif(sys.platform == 'linux2', | |
239 | "This test does not run under Linux") |
|
239 | "This test does not run under Linux") | |
240 | skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X") |
|
240 | skip_osx = skipif(sys.platform == 'darwin',"This test does not run under OS X") | |
241 |
|
241 | |||
242 |
|
242 | |||
243 | # Decorators to skip tests if not on specific platforms. |
|
243 | # Decorators to skip tests if not on specific platforms. | |
244 | skip_if_not_win32 = skipif(sys.platform != 'win32', |
|
244 | skip_if_not_win32 = skipif(sys.platform != 'win32', | |
245 | "This test only runs under Windows") |
|
245 | "This test only runs under Windows") | |
246 | skip_if_not_linux = skipif(sys.platform != 'linux2', |
|
246 | skip_if_not_linux = skipif(sys.platform != 'linux2', | |
247 | "This test only runs under Linux") |
|
247 | "This test only runs under Linux") | |
248 | skip_if_not_osx = skipif(sys.platform != 'darwin', |
|
248 | skip_if_not_osx = skipif(sys.platform != 'darwin', | |
249 | "This test only runs under OSX") |
|
249 | "This test only runs under OSX") | |
250 |
|
250 | |||
251 | # Other skip decorators |
|
251 | # Other skip decorators | |
252 | skipif_not_numpy = skipif(numpy_not_available,"This test requires numpy") |
|
252 | skipif_not_numpy = skipif(numpy_not_available,"This test requires numpy") | |
253 |
|
253 | |||
254 | skipknownfailure = skip('This test is known to fail') |
|
254 | skipknownfailure = skip('This test is known to fail') |
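
The decorators collected above are intended to be dropped into ordinary test modules. What follows is a brief usage sketch, not part of the changeset: the import path IPython.testing.decorators and the test names and bodies are assumptions chosen for illustration, and nose is assumed to be installed.

    # Hypothetical test module exercising the skip decorators defined above.
    import sys

    from IPython.testing.decorators import (skip, skipif, skip_win32,
                                            skipif_not_numpy)

    @skip('Known to fail; see the project tracker')
    def test_known_failure():
        # nose raises SkipTest before the body ever runs
        assert False

    @skipif(lambda: sys.maxint < 2**32, 'Needs a 64-bit Python')
    def test_needs_64bit():
        # A callable condition is only evaluated at test time
        assert sys.maxint >= 2**32

    @skipif_not_numpy
    def test_array_sum():
        # Only runs when numpy imports successfully
        import numpy
        assert numpy.arange(3).sum() == 3

    @skip_win32
    def test_posix_platform():
        assert sys.platform != 'win32'

Passing a callable rather than a plain bool to skipif defers the check to test time; skipif_not_numpy relies on this to avoid importing numpy while the suite is merely being collected.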
@@ -1,351 +1,351 b'' | |||||
1 | .. _parallel_process: |
|
1 | .. _parallel_process: | |
2 |
|
2 | |||
3 | =========================================== |
|
3 | =========================================== | |
4 | Starting the IPython controller and engines |
|
4 | Starting the IPython controller and engines | |
5 | =========================================== |
|
5 | =========================================== | |
6 |
|
6 | |||
7 | To use IPython for parallel computing, you need to start one instance of |
|
7 | To use IPython for parallel computing, you need to start one instance of | |
8 | the controller and one or more instances of the engine. The controller |
|
8 | the controller and one or more instances of the engine. The controller | |
9 | and each engine can run on different machines or on the same machine. |
|
9 | and each engine can run on different machines or on the same machine. | |
10 | Because of this, there are many different possibilities. |
|
10 | Because of this, there are many different possibilities. | |
11 |
|
11 | |||
12 | Broadly speaking, there are two ways of going about starting a controller and engines: |
|
12 | Broadly speaking, there are two ways of going about starting a controller and engines: | |
13 |
|
13 | |||
14 | * In an automated manner using the :command:`ipcluster` command. |
|
14 | * In an automated manner using the :command:`ipcluster` command. | |
15 | * In a more manual way using the :command:`ipcontroller` and |
|
15 | * In a more manual way using the :command:`ipcontroller` and | |
16 | :command:`ipengine` commands. |
|
16 | :command:`ipengine` commands. | |
17 |
|
17 | |||
18 | This document describes both of these methods. We recommend that new users start with the :command:`ipcluster` command as it simplifies many common usage cases. |
|
18 | This document describes both of these methods. We recommend that new users start with the :command:`ipcluster` command as it simplifies many common usage cases. | |
19 |
|
19 | |||
20 | General considerations |
|
20 | General considerations | |
21 | ====================== |
|
21 | ====================== | |
22 |
|
22 | |||
23 | Before delving into the details about how you can start a controller and engines using the various methods, we outline some of the general issues that come up when starting the controller and engines. These things come up no matter which method you use to start your IPython cluster. |
|
23 | Before delving into the details about how you can start a controller and engines using the various methods, we outline some of the general issues that come up when starting the controller and engines. These things come up no matter which method you use to start your IPython cluster. | |
24 |
|
24 | |||
25 | Let's say that you want to start the controller on ``host0`` and engines on hosts ``host1``-``hostn``. The following steps are then required: |
|
25 | Let's say that you want to start the controller on ``host0`` and engines on hosts ``host1``-``hostn``. The following steps are then required: | |
26 |
|
26 | |||
27 | 1. Start the controller on ``host0`` by running :command:`ipcontroller` on |
|
27 | 1. Start the controller on ``host0`` by running :command:`ipcontroller` on | |
28 | ``host0``. |
|
28 | ``host0``. | |
29 | 2. Move the FURL file (:file:`ipcontroller-engine.furl`) created by the |
|
29 | 2. Move the FURL file (:file:`ipcontroller-engine.furl`) created by the | |
30 | controller from ``host0`` to hosts ``host1``-``hostn``. |
|
30 | controller from ``host0`` to hosts ``host1``-``hostn``. | |
31 | 3. Start the engines on hosts ``host1``-``hostn`` by running |
|
31 | 3. Start the engines on hosts ``host1``-``hostn`` by running | |
32 | :command:`ipengine`. This command has to be told where the FURL file |
|
32 | :command:`ipengine`. This command has to be told where the FURL file | |
33 | (:file:`ipcontroller-engine.furl`) is located. |
|
33 | (:file:`ipcontroller-engine.furl`) is located. | |
34 |
|
34 | |||
35 | At this point, the controller and engines will be connected. By default, the |
|
35 | At this point, the controller and engines will be connected. By default, the | |
36 | FURL files created by the controller are put into the |
|
36 | FURL files created by the controller are put into the | |
37 | :file:`~/.ipython/security` directory. If the engines share a filesystem with |
|
37 | :file:`~/.ipython/security` directory. If the engines share a filesystem with | |
38 | the controller, step 2 can be skipped as the engines will automatically look |
|
38 | the controller, step 2 can be skipped as the engines will automatically look | |
39 | at that location. |
|
39 | at that location. | |
40 |
|
40 | |||
41 | The final step required to actually use the running controller from a |
|
41 | The final step required to actually use the running controller from a | |
42 | client is to move the FURL files :file:`ipcontroller-mec.furl` and |
|
42 | client is to move the FURL files :file:`ipcontroller-mec.furl` and | |
43 | :file:`ipcontroller-tc.furl` from ``host0`` to the host where the clients will |
|
43 | :file:`ipcontroller-tc.furl` from ``host0`` to the host where the clients will | |
44 | be run. If these files are put into the :file:`~/.ipython/security` directory of the client's host, they will be found automatically. Otherwise, the full path to them has to be passed to the client's constructor. |
|
44 | be run. If these files are put into the :file:`~/.ipython/security` directory of the client's host, they will be found automatically. Otherwise, the full path to them has to be passed to the client's constructor. | |
45 |
|
45 | |||
46 | Using :command:`ipcluster` |
|
46 | Using :command:`ipcluster` | |
47 | ========================== |
|
47 | ========================== | |
48 |
|
48 | |||
49 | The :command:`ipcluster` command provides a simple way of starting a controller and engines in the following situations: |
|
49 | The :command:`ipcluster` command provides a simple way of starting a controller and engines in the following situations: | |
50 |
|
50 | |||
51 | 1. When the controller and engines are all run on localhost. This is useful |
|
51 | 1. When the controller and engines are all run on localhost. This is useful | |
52 | for testing or running on a multicore computer. |
|
52 | for testing or running on a multicore computer. | |
53 | 2. When engines are started using the :command:`mpirun` command that comes |
|
53 | 2. When engines are started using the :command:`mpirun` command that comes | |
54 | with most MPI [MPI]_ implementations. |
|
54 | with most MPI [MPI]_ implementations. | |
55 | 3. When engines are started using the PBS [PBS]_ batch system. |
|
55 | 3. When engines are started using the PBS [PBS]_ batch system. | |
56 | 4. When the controller is started on localhost and the engines are started on |
|
56 | 4. When the controller is started on localhost and the engines are started on | |
57 | remote nodes using :command:`ssh`. |
|
57 | remote nodes using :command:`ssh`. | |
58 |
|
58 | |||
59 | .. note:: |
|
59 | .. note:: | |
60 |
|
60 | |||
61 | It is also possible for advanced users to add support to |
|
61 | It is also possible for advanced users to add support to | |
62 | :command:`ipcluster` for starting controllers and engines using other |
|
62 | :command:`ipcluster` for starting controllers and engines using other | |
63 | methods (like Sun's Grid Engine for example). |
|
63 | methods (like Sun's Grid Engine for example). | |
64 |
|
64 | |||
65 | .. note:: |
|
65 | .. note:: | |
66 |
|
66 | |||
67 | Currently :command:`ipcluster` requires that the |
|
67 | Currently :command:`ipcluster` requires that the | |
68 | :file:`~/.ipython/security` directory live on a shared filesystem that is |
|
68 | :file:`~/.ipython/security` directory live on a shared filesystem that is | |
69 | seen by both the controller and engines. If you don't have a shared file |
|
69 | seen by both the controller and engines. If you don't have a shared file | |
70 | system you will need to use :command:`ipcontroller` and |
|
70 | system you will need to use :command:`ipcontroller` and | |
71 | :command:`ipengine` directly. This constraint can be relaxed if you are |
|
71 | :command:`ipengine` directly. This constraint can be relaxed if you are | |
72 | using the :command:`ssh` method to start the cluster. |
|
72 | using the :command:`ssh` method to start the cluster. | |
73 |
|
73 | |||
74 | Under the hood, :command:`ipcluster` just uses :command:`ipcontroller` |
|
74 | Under the hood, :command:`ipcluster` just uses :command:`ipcontroller` | |
75 | and :command:`ipengine` to perform the steps described above. |
|
75 | and :command:`ipengine` to perform the steps described above. | |
76 |
|
76 | |||
77 | Using :command:`ipcluster` in local mode |
|
77 | Using :command:`ipcluster` in local mode | |
78 | ---------------------------------------- |
|
78 | ---------------------------------------- | |
79 |
|
79 | |||
80 | To start one controller and 4 engines on localhost, just do:: |
|
80 | To start one controller and 4 engines on localhost, just do:: | |
81 |
|
81 | |||
82 | $ ipcluster local -n 4 |
|
82 | $ ipcluster local -n 4 | |
83 |
|
83 | |||
84 | To see other command line options for the local mode, do:: |
|
84 | To see other command line options for the local mode, do:: | |
85 |
|
85 | |||
86 | $ ipcluster local -h |
|
86 | $ ipcluster local -h | |
87 |
|
87 | |||
88 | Using :command:`ipcluster` in mpiexec/mpirun mode |
|
88 | Using :command:`ipcluster` in mpiexec/mpirun mode | |
89 | ------------------------------------------------- |
|
89 | ------------------------------------------------- | |
90 |
|
90 | |||
91 | The mpiexec/mpirun mode is useful if you: |
|
91 | The mpiexec/mpirun mode is useful if you: | |
92 |
|
92 | |||
93 | 1. Have MPI installed. |
|
93 | 1. Have MPI installed. | |
94 | 2. Have your systems configured to use the :command:`mpiexec` or |
|
94 | 2. Have your systems configured to use the :command:`mpiexec` or | |
95 | :command:`mpirun` commands to start MPI processes. |
|
95 | :command:`mpirun` commands to start MPI processes. | |
96 |
|
96 | |||
97 | .. note:: |
|
97 | .. note:: | |
98 |
|
98 | |||
99 | The preferred command to use is :command:`mpiexec`. However, we also |
|
99 | The preferred command to use is :command:`mpiexec`. However, we also | |
100 | support :command:`mpirun` for backwards compatibility. The underlying |
|
100 | support :command:`mpirun` for backwards compatibility. The underlying | |
101 | logic used is exactly the same, the only difference being the name of the |
|
101 | logic used is exactly the same, the only difference being the name of the | |
102 | command line program that is called. |
|
102 | command line program that is called. | |
103 |
|
103 | |||
104 | If these are satisfied, you can start an IPython cluster using:: |
|
104 | If these are satisfied, you can start an IPython cluster using:: | |
105 |
|
105 | |||
106 | $ ipcluster mpiexec -n 4 |
|
106 | $ ipcluster mpiexec -n 4 | |
107 |
|
107 | |||
108 | This does the following: |
|
108 | This does the following: | |
109 |
|
109 | |||
110 | 1. Starts the IPython controller on the current host. |
|
110 | 1. Starts the IPython controller on the current host. | |
111 | 2. Uses :command:`mpiexec` to start 4 engines. |
|
111 | 2. Uses :command:`mpiexec` to start 4 engines. | |
112 |
|
112 | |||
113 | On newer MPI implementations (such as OpenMPI), this will work even if you don't make any calls to MPI or call :func:`MPI_Init`. However, older MPI implementations actually require each process to call :func:`MPI_Init` upon starting. The easiest way of having this done is to install the mpi4py [mpi4py]_ package and then call ipcluster with the ``--mpi`` option:: |
|
113 | On newer MPI implementations (such as OpenMPI), this will work even if you don't make any calls to MPI or call :func:`MPI_Init`. However, older MPI implementations actually require each process to call :func:`MPI_Init` upon starting. The easiest way of having this done is to install the mpi4py [mpi4py]_ package and then call ipcluster with the ``--mpi`` option:: | |
114 |
|
114 | |||
115 | $ ipcluster mpiexec -n 4 --mpi=mpi4py |
|
115 | $ ipcluster mpiexec -n 4 --mpi=mpi4py | |
116 |
|
116 | |||
117 | Unfortunately, even this won't work for some MPI implementations. If you are having problems with this, you will likely have to use a custom Python executable that itself calls :func:`MPI_Init` at the appropriate time. Fortunately, mpi4py comes with such a custom Python executable that is easy to install and use. However, this custom Python executable approach will not work with :command:`ipcluster` currently. |
|
117 | Unfortunately, even this won't work for some MPI implementations. If you are having problems with this, you will likely have to use a custom Python executable that itself calls :func:`MPI_Init` at the appropriate time. Fortunately, mpi4py comes with such a custom Python executable that is easy to install and use. However, this custom Python executable approach will not work with :command:`ipcluster` currently. | |
118 |
|
118 | |||
119 | Additional command line options for this mode can be found by doing:: |
|
119 | Additional command line options for this mode can be found by doing:: | |
120 |
|
120 | |||
121 | $ ipcluster mpiexec -h |
|
121 | $ ipcluster mpiexec -h | |
122 |
|
122 | |||
123 | More details on using MPI with IPython can be found :ref:`here <parallelmpi>`. |
|
123 | More details on using MPI with IPython can be found :ref:`here <parallelmpi>`. | |
124 |
|
124 | |||
125 |
|
125 | |||
126 | Using :command:`ipcluster` in PBS mode |
|
126 | Using :command:`ipcluster` in PBS mode | |
127 | -------------------------------------- |
|
127 | -------------------------------------- | |
128 |
|
128 | |||
129 | The PBS mode uses the Portable Batch System [PBS]_ to start the engines. To use this mode, you first need to create a PBS script template that will be used to start the engines. Here is a sample PBS script template: |
|
129 | The PBS mode uses the Portable Batch System [PBS]_ to start the engines. To use this mode, you first need to create a PBS script template that will be used to start the engines. Here is a sample PBS script template: | |
130 |
|
130 | |||
131 | .. sourcecode:: bash |
|
131 | .. sourcecode:: bash | |
132 |
|
132 | |||
133 | #PBS -N ipython |
|
133 | #PBS -N ipython | |
134 | #PBS -j oe |
|
134 | #PBS -j oe | |
135 | #PBS -l walltime=00:10:00 |
|
135 | #PBS -l walltime=00:10:00 | |
136 | #PBS -l nodes=${n/4}:ppn=4 |
|
136 | #PBS -l nodes=${n/4}:ppn=4 | |
137 | #PBS -q parallel |
|
137 | #PBS -q parallel | |
138 |
|
138 | |||
139 | cd $$PBS_O_WORKDIR |
|
139 | cd $$PBS_O_WORKDIR | |
140 | export PATH=$$HOME/usr/local/bin |
|
140 | export PATH=$$HOME/usr/local/bin | |
141 | export PYTHONPATH=$$HOME/usr/local/lib/python2.4/site-packages |
|
141 | export PYTHONPATH=$$HOME/usr/local/lib/python2.4/site-packages | |
142 | /usr/local/bin/mpiexec -n ${n} ipengine --logfile=$$PBS_O_WORKDIR/ipengine |
|
142 | /usr/local/bin/mpiexec -n ${n} ipengine --logfile=$$PBS_O_WORKDIR/ipengine | |
143 |
|
143 | |||
144 | There are a few important points about this template: |
|
144 | There are a few important points about this template: | |
145 |
|
145 | |||
146 | 1. This template will be rendered at runtime using IPython's :mod:`Itpl` |
|
146 | 1. This template will be rendered at runtime using IPython's :mod:`Itpl` | |
147 | template engine. |
|
147 | template engine. | |
148 |
|
148 | |||
149 | 2. Instead of putting in the actual number of engines, use the notation |
|
149 | 2. Instead of putting in the actual number of engines, use the notation | |
150 | ``${n}`` to indicate the number of engines to be started. You can also use |
|
150 | ``${n}`` to indicate the number of engines to be started. You can also use | |
151 | expressions like ``${n/4}`` in the template to indicate the number of |
|
151 | expressions like ``${n/4}`` in the template to indicate the number of | |
152 | nodes. |
|
152 | nodes. | |
153 |
|
153 | |||
154 | 3. Because ``$`` is a special character used by the template engine, you must |
|
154 | 3. Because ``$`` is a special character used by the template engine, you must | |
155 | escape any ``$`` by using ``$$``. This is important when referring to |
|
155 | escape any ``$`` by using ``$$``. This is important when referring to | |
156 | environment variables in the template. |
|
156 | environment variables in the template. | |
157 |
|
157 | |||
158 | 4. Any options to :command:`ipengine` should be given in the batch script |
|
158 | 4. Any options to :command:`ipengine` should be given in the batch script | |
159 | template. |
|
159 | template. | |
160 |
|
160 | |||
161 | 5. Depending on the configuration of your system, you may have to set |
|
161 | 5. Depending on the configuration of your system, you may have to set | |
162 | environment variables in the script template. |
|
162 | environment variables in the script template. | |
163 |
|
163 | |||
164 | Once you have created such a script, save it with a name like :file:`pbs.template`. Now you are ready to start your job:: |
|
164 | Once you have created such a script, save it with a name like :file:`pbs.template`. Now you are ready to start your job:: | |
165 |
|
165 | |||
166 | $ ipcluster pbs -n 128 --pbs-script=pbs.template |
|
166 | $ ipcluster pbs -n 128 --pbs-script=pbs.template | |
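To see what the ``${n}`` and ``$$`` notation does, here is a rough sketch using Python's standard :class:`string.Template` rather than IPython's :mod:`Itpl` engine (the ``${name}`` and ``$$`` conventions are the same, though :mod:`Itpl` can additionally evaluate expressions such as ``${n/4}``, which are pre-computed below):

.. sourcecode:: python

    from string import Template

    # Simplified stand-in for the PBS template above; expressions like
    # ${n/4} are computed in Python since string.Template cannot evaluate them.
    template = Template(
        "#PBS -l nodes=${nodes}:ppn=4\n"
        "cd $$PBS_O_WORKDIR\n"
        "mpiexec -n ${n} ipengine --logfile=$$PBS_O_WORKDIR/ipengine\n")

    # ${n} and ${nodes} are filled in; $$PBS_O_WORKDIR comes out as $PBS_O_WORKDIR.
    print(template.substitute(n=128, nodes=128 // 4))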
167 |
|
167 | |||
168 | Additional command line options for this mode can be found by doing:: |
|
168 | Additional command line options for this mode can be found by doing:: | |
169 |
|
169 | |||
170 | $ ipcluster pbs -h |
|
170 | $ ipcluster pbs -h | |
171 |
|
171 | |||
172 | Using :command:`ipcluster` in SSH mode |
|
172 | Using :command:`ipcluster` in SSH mode | |
173 | -------------------------------------- |
|
173 | -------------------------------------- | |
174 |
|
174 | |||
175 | The SSH mode uses :command:`ssh` to execute :command:`ipengine` on remote |
|
175 | The SSH mode uses :command:`ssh` to execute :command:`ipengine` on remote | |
176 | nodes and the :command:`ipcontroller` on localhost. |
|
176 | nodes and the :command:`ipcontroller` on localhost. | |
177 |
|
177 | |||
178 | When using this mode it is highly recommended that you have set up SSH keys and are using ssh-agent [SSH]_ for password-less logins. |
|
178 | When using this mode it is highly recommended that you have set up SSH keys and are using ssh-agent [SSH]_ for password-less logins. | |
179 |
|
179 | |||
180 | To use this mode you need a Python file describing the cluster. Here is an example of such a "clusterfile": |
|
180 | To use this mode you need a Python file describing the cluster. Here is an example of such a "clusterfile": | |
181 |
|
181 | |||
182 | .. sourcecode:: python |
|
182 | .. sourcecode:: python | |
183 |
|
183 | |||
184 | send_furl = True |
|
184 | send_furl = True | |
185 | engines = { 'host1.example.com' : 2, |
|
185 | engines = { 'host1.example.com' : 2, | |
186 | 'host2.example.com' : 5, |
|
186 | 'host2.example.com' : 5, | |
187 | 'host3.example.com' : 1, |
|
187 | 'host3.example.com' : 1, | |
188 | 'host4.example.com' : 8 } |
|
188 | 'host4.example.com' : 8 } | |
189 |
|
189 | |||
190 | Since this is a regular Python file, the usual Python syntax applies. Things to note: |
|
190 | Since this is a regular Python file, the usual Python syntax applies. Things to note: | |
191 |
|
191 | |||
192 | * The `engines` dict, where each key is a host we want to run engines on and |
|
192 | * The `engines` dict, where each key is a host we want to run engines on and | |
193 | the value is the number of engines to run on that host. |
|
193 | the value is the number of engines to run on that host. | |
194 | * send_furl can be either `True` or `False`; if `True`, it will copy over the |
|
194 | * send_furl can be either `True` or `False`; if `True`, it will copy over the | |
195 | furl needed for :command:`ipengine` to each host. |
|
195 | furl needed for :command:`ipengine` to each host. | |
196 |
|
196 | |||
197 | The ``--clusterfile`` command line option lets you specify the file to use for |
|
197 | The ``--clusterfile`` command line option lets you specify the file to use for | |
198 | the cluster definition. Once you have your cluster file and can |
|
198 | the cluster definition. Once you have your cluster file and can | |
199 | :command:`ssh` into the remote hosts without a password, you are ready to |
|
199 | :command:`ssh` into the remote hosts without a password, you are ready to | |
200 | start your cluster like so: |
|
200 | start your cluster like so: | |
201 |
|
201 | |||
202 | .. sourcecode:: bash |
|
202 | .. sourcecode:: bash | |
203 |
|
203 | |||
204 | $ ipcluster ssh --clusterfile /path/to/my/clusterfile.py |
|
204 | $ ipcluster ssh --clusterfile /path/to/my/clusterfile.py | |
205 |
|
205 | |||
206 |
|
206 | |||
207 | Two helper shell scripts are used to start and stop :command:`ipengine` on remote hosts: |
|
207 | Two helper shell scripts are used to start and stop :command:`ipengine` on remote hosts: | |
208 |
|
208 | |||
209 | * sshx.sh |
|
209 | * sshx.sh | |
210 | * engine_killer.sh |
|
210 | * engine_killer.sh | |
211 |
|
211 | |||
212 | Defaults for both of these are contained in the source code for :command:`ipcluster`. The default scripts are written to a local file in a temp directory and then copied to a temp directory on the remote host and executed from there. On most Unix, Linux and OS X systems this is :file:`/tmp`. |
|
212 | Defaults for both of these are contained in the source code for :command:`ipcluster`. The default scripts are written to a local file in a temp directory and then copied to a temp directory on the remote host and executed from there. On most Unix, Linux and OS X systems this is :file:`/tmp`. | |
213 |
|
213 | |||
214 | The default sshx.sh is the following: |
|
214 | The default sshx.sh is the following: | |
215 |
|
215 | |||
216 | .. sourcecode:: bash |
|
216 | .. sourcecode:: bash | |
217 |
|
217 | |||
218 | #!/bin/sh |
|
218 | #!/bin/sh | |
219 | "$@" &> /dev/null & |
|
219 | "$@" &> /dev/null & | |
220 | echo $! |
|
220 | echo $! | |
221 |
|
221 | |||
222 | If you want to use a custom sshx.sh script you need to use the ``--sshx`` |
|
222 | If you want to use a custom sshx.sh script you need to use the ``--sshx`` | |
223 | option and specify the file to use. Using a custom sshx.sh file could be |
|
223 | option and specify the file to use. Using a custom sshx.sh file could be | |
224 | helpful when you need to set up the environment on the remote host before |
|
224 | helpful when you need to set up the environment on the remote host before | |
225 | executing :command:`ipengine`. |
|
225 | executing :command:`ipengine`. | |
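For example, a hypothetical custom ``sshx.sh`` passed via ``--sshx`` might export environment variables before launching the command (the exported paths below are placeholders, not prescribed values):

.. sourcecode:: bash

    #!/bin/sh
    # Hypothetical custom sshx.sh: adjust PATH/PYTHONPATH for your hosts.
    export PATH=$HOME/usr/local/bin:$PATH
    export PYTHONPATH=$HOME/usr/local/lib/python2.4/site-packages
    # Same launch behaviour as the default sshx.sh shown above.
    "$@" &> /dev/null &
    echo $!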
226 |
|
226 | |||
227 | For a detailed options list: |
|
227 | For a detailed options list: | |
228 |
|
228 | |||
229 | .. sourcecode:: bash |
|
229 | .. sourcecode:: bash | |
230 |
|
230 | |||
231 | $ ipcluster ssh -h |
|
231 | $ ipcluster ssh -h | |
232 |
|
232 | |||
233 | Current limitations of the SSH mode of :command:`ipcluster` are: |
|
233 | Current limitations of the SSH mode of :command:`ipcluster` are: | |
234 |
|
234 | |||
235 | * Untested on Windows. Would require a working :command:`ssh` on Windows. |
|
235 | * Untested on Windows. Would require a working :command:`ssh` on Windows. | |
236 | Also, we are using shell scripts to set up and execute commands on remote |
|
236 | Also, we are using shell scripts to set up and execute commands on remote | |
237 | hosts. |
|
237 | hosts. | |
238 | * :command:`ipcontroller` is started on localhost, with no option to start it |
|
238 | * :command:`ipcontroller` is started on localhost, with no option to start it | |
239 | on a remote node. |
|
239 | on a remote node. | |
240 |
|
240 | |||
241 | Using the :command:`ipcontroller` and :command:`ipengine` commands |
|
241 | Using the :command:`ipcontroller` and :command:`ipengine` commands | |
242 | ================================================================== |
|
242 | ================================================================== | |
243 |
|
243 | |||
244 | It is also possible to use the :command:`ipcontroller` and :command:`ipengine` commands to start your controller and engines. This approach gives you full control over all aspects of the startup process. |
|
244 | It is also possible to use the :command:`ipcontroller` and :command:`ipengine` commands to start your controller and engines. This approach gives you full control over all aspects of the startup process. | |
245 |
|
245 | |||
246 | Starting the controller and engine on your local machine |
|
246 | Starting the controller and engine on your local machine | |
247 | -------------------------------------------------------- |
|
247 | -------------------------------------------------------- | |
248 |
|
248 | |||
249 | To use :command:`ipcontroller` and :command:`ipengine` to start things on your |
|
249 | To use :command:`ipcontroller` and :command:`ipengine` to start things on your | |
250 | local machine, do the following. |
|
250 | local machine, do the following. | |
251 |
|
251 | |||
252 | First start the controller:: |
|
252 | First start the controller:: | |
253 |
|
253 | |||
254 | $ ipcontroller |
|
254 | $ ipcontroller | |
255 |
|
255 | |||
256 | Next, start as many instances of the engine as you want by running (repeatedly) the command:: |
|
256 | Next, start as many instances of the engine as you want by running (repeatedly) the command:: | |
257 |
|
257 | |||
258 | $ ipengine |
|
258 | $ ipengine | |
259 |
|
259 | |||
260 | The engines should start and automatically connect to the controller using the FURL files in :file:`~/.ipython/security`. You are now ready to use the controller and engines from IPython. |
|
260 | The engines should start and automatically connect to the controller using the FURL files in :file:`~/.ipython/security`. You are now ready to use the controller and engines from IPython. | |
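As a quick check that everything is wired up, one can connect a client from an IPython session. This is only a sketch; it assumes the multi-engine client interface from :mod:`IPython.kernel.client` and that :file:`ipcontroller-mec.furl` sits in the default :file:`~/.ipython/security` location:

.. sourcecode:: python

    from IPython.kernel import client

    # Picks up ipcontroller-mec.furl from ~/.ipython/security by default;
    # a full path to the FURL file can be passed to the constructor instead.
    mec = client.MultiEngineClient()
    print(mec.get_ids())   # e.g. [0, 1, 2, 3] for four running engines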
261 |
|
261 | |||
262 | .. warning:: |
|
262 | .. warning:: | |
263 |
|
263 | |||
264 | The order of the above operations is very important. You *must* |
|
264 | The order of the above operations is very important. You *must* | |
265 | start the controller before the engines, since the engines connect |
|
265 | start the controller before the engines, since the engines connect | |
266 | to the controller as they get started. |
|
266 | to the controller as they get started. | |
267 |
|
267 | |||
268 | .. note:: |
|
268 | .. note:: | |
269 |
|
269 | |||
270 | On some platforms (OS X), to put the controller and engine into the |
|
270 | On some platforms (OS X), to put the controller and engine into the | |
271 | background you may need to give these commands in the form ``(ipcontroller |
|
271 | background you may need to give these commands in the form ``(ipcontroller | |
272 | &)`` and ``(ipengine &)`` (with the parentheses) for them to work |
|
272 | &)`` and ``(ipengine &)`` (with the parentheses) for them to work | |
273 | properly. |
|
273 | properly. | |
274 |
|
274 | |||
275 | Starting the controller and engines on different hosts |
|
275 | Starting the controller and engines on different hosts | |
276 | ------------------------------------------------------ |
|
276 | ------------------------------------------------------ | |
277 |
|
277 | |||
278 | When the controller and engines are running on different hosts, things are |
|
278 | When the controller and engines are running on different hosts, things are | |
279 | slightly more complicated, but the underlying ideas are the same: |
|
279 | slightly more complicated, but the underlying ideas are the same: | |
280 |
|
280 | |||
281 | 1. Start the controller on a host using :command:`ipcontroller`. |
|
281 | 1. Start the controller on a host using :command:`ipcontroller`. | |
282 | 2. Copy :file:`ipcontroller-engine.furl` from :file:`~/.ipython/security` on the controller's host to the host where the engines will run. |
|
282 | 2. Copy :file:`ipcontroller-engine.furl` from :file:`~/.ipython/security` on the controller's host to the host where the engines will run. | |
283 | 3. Use :command:`ipengine` on the engine's hosts to start the engines. |
|
283 | 3. Use :command:`ipengine` on the engine's hosts to start the engines. | |
284 |
|
284 | |||
285 | The only thing you have to be careful of is to tell :command:`ipengine` where the :file:`ipcontroller-engine.furl` file is located. There are two ways you can do this: |
|
285 | The only thing you have to be careful of is to tell :command:`ipengine` where the :file:`ipcontroller-engine.furl` file is located. There are two ways you can do this: | |
286 |
|
286 | |||
287 | * Put :file:`ipcontroller-engine.furl` in the :file:`~/.ipython/security` |
|
287 | * Put :file:`ipcontroller-engine.furl` in the :file:`~/.ipython/security` | |
288 | directory on the engine's host, where it will be found automatically. |
|
288 | directory on the engine's host, where it will be found automatically. | |
289 | * Call :command:`ipengine` with the ``--furl-file=full_path_to_the_file`` |
|
289 | * Call :command:`ipengine` with the ``--furl-file=full_path_to_the_file`` | |
290 | flag. |
|
290 | flag. | |
291 |
|
291 | |||
292 | The ``--furl-file`` flag works like this:: |
|
292 | The ``--furl-file`` flag works like this:: | |
293 |
|
293 | |||
294 | $ ipengine --furl-file=/path/to/my/ipcontroller-engine.furl |
|
294 | $ ipengine --furl-file=/path/to/my/ipcontroller-engine.furl | |
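Putting these steps together, a session for one controller host and one engine host might look like the following sketch (host names are placeholders):

.. sourcecode:: bash

    # On host0: start the controller, which writes the FURL files.
    ipcontroller &

    # Still on host0: copy the engine FURL to the engine host.
    scp ~/.ipython/security/ipcontroller-engine.furl host1:.ipython/security/

    # On host1: start an engine; it finds the FURL in ~/.ipython/security.
    ipengine &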
295 |
|
295 | |||
296 | .. note:: |
|
296 | .. note:: | |
297 |
|
297 | |||
298 | If the controller's and engines' hosts all have a shared file system |
|
298 | If the controller's and engines' hosts all have a shared file system | |
299 | (:file:`~/.ipython/security` is the same on all of them), then things |
|
299 | (:file:`~/.ipython/security` is the same on all of them), then things | |
300 | will just work! |
|
300 | will just work! | |
301 |
|
301 | |||
302 | Make FURL files persistent |
|
302 | Make FURL files persistent | |
303 | --------------------------- |
|
303 | --------------------------- | |
304 |
|
304 | |||
305 | At first glance it may seem that managing the FURL files is a bit |
|
305 | At first glance it may seem that managing the FURL files is a bit | |
306 | annoying. Going back to the house and key analogy, copying the FURL around |
|
306 | annoying. Going back to the house and key analogy, copying the FURL around | |
307 | each time you start the controller is like having to make a new key every time |
|
307 | each time you start the controller is like having to make a new key every time | |
308 | you want to unlock the door and enter your house. As with your house, you want |
|
308 | you want to unlock the door and enter your house. As with your house, you want | |
309 | to be able to create the key (or FURL file) once, and then simply use it at |
|
309 | to be able to create the key (or FURL file) once, and then simply use it at | |
310 | any point in the future. |
|
310 | any point in the future. | |
311 |
|
311 | |||
312 | This is possible, but before you do this, you **must** remove any old FURL |
|
312 | This is possible, but before you do this, you **must** remove any old FURL | |
313 | files in the :file:`~/.ipython/security` directory. |
|
313 | files in the :file:`~/.ipython/security` directory. | |
314 |
|
314 | |||
315 | .. warning:: |
|
315 | .. warning:: | |
316 |
|
316 | |||
317 | You **must** remove old FURL files before using persistent FURL files. |
|
317 | You **must** remove old FURL files before using persistent FURL files. | |
318 |
|
318 | |||
319 | Then, the only thing you have to do is decide what ports the controller will |
|
319 | Then, the only thing you have to do is decide what ports the controller will | |
320 | listen on for the engines and clients. This is done as follows:: |
|
320 | listen on for the engines and clients. This is done as follows:: | |
321 |
|
321 | |||
322 | $ ipcontroller -r --client-port=10101 --engine-port=10102 |
|
322 | $ ipcontroller -r --client-port=10101 --engine-port=10102 | |
323 |
|
323 | |||
324 | These options also work with all of the various modes of |
|
324 | These options also work with all of the various modes of | |
325 | :command:`ipcluster`:: |
|
325 | :command:`ipcluster`:: | |
326 |
|
326 | |||
327 | $ ipcluster local -n 2 -r --client-port=10101 --engine-port=10102 |
|
327 | $ ipcluster local -n 2 -r --client-port=10101 --engine-port=10102 | |
328 |
|
328 | |||
329 | Then, just copy the FURL files over the first time and you are set. You can |
|
329 | Then, just copy the FURL files over the first time and you are set. You can | |
330 | start and stop the controller and engines as many times as you want in the |
|
330 | start and stop the controller and engines as many times as you want in the | |
331 | future, just make sure to tell the controller to use the *same* ports. |
|
331 | future, just make sure to tell the controller to use the *same* ports. | |
332 |
|
332 | |||
333 | .. note:: |
|
333 | .. note:: | |
334 |
|
334 | |||
335 | You may ask the question: what ports does the controller listen on if you |
|
335 | You may ask the question: what ports does the controller listen on if you | |
336 | don't tell it to use specific ones? The default is to use high random port |
|
336 | don't tell it to use specific ones? The default is to use high random port | |
337 | numbers. We do this for two reasons: i) to increase security through |
|
337 | numbers. We do this for two reasons: i) to increase security through | |
338 | obscurity and ii) to allow multiple controllers on a given host to start and |
|
338 | obscurity and ii) to allow multiple controllers on a given host to start and | |
339 | automatically use different ports. |
|
339 | automatically use different ports. | |
340 |
|
340 | |||
341 | Log files |
|
341 | Log files | |
342 | --------- |
|
342 | --------- | |
343 |
|
343 | |||
344 | All of the components of IPython have log files associated with them. |
|
344 | All of the components of IPython have log files associated with them. | |
345 | These log files can be extremely useful in debugging problems with |
|
345 | These log files can be extremely useful in debugging problems with | |
346 | IPython and can be found in the directory :file:`~/.ipython/log`. Sending |
|
346 | IPython and can be found in the directory :file:`~/.ipython/log`. Sending | |
347 | the log files to us will often help us to debug any problems. |
|
347 | the log files to us will often help us to debug any problems. | |
348 |
|
348 | |||
349 |
|
349 | |||
350 | .. [PBS] Portable Batch System. http://www.openpbs.org/ |
|
350 | .. [PBS] Portable Batch System. http://www.openpbs.org/ | |
351 | .. [SSH] SSH-Agent http://en.wikipedia.org/wiki/Ssh-agent |
|
351 | .. [SSH] SSH-Agent http://en.wikipedia.org/wiki/Ssh-agent |