Merge with upstream
Fernando Perez, r1405:dc02b179 (merge)
@@ -0,0 +1,233 b''
1 # encoding: utf-8
2
3 """A parallelized version of Python's builtin map."""
4
5 __docformat__ = "restructuredtext en"
6
7 #----------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #----------------------------------------------------------------------------
13
14 #----------------------------------------------------------------------------
15 # Imports
16 #----------------------------------------------------------------------------
17
18 from types import FunctionType
19 from zope.interface import Interface, implements
20 from IPython.kernel.task import MapTask
21 from IPython.kernel.twistedutil import DeferredList, gatherBoth
22 from IPython.kernel.util import printer
23 from IPython.kernel.error import collect_exceptions
24
25 #----------------------------------------------------------------------------
26 # Code
27 #----------------------------------------------------------------------------
28
29 class IMapper(Interface):
30 """The basic interface for a Mapper.
31
32 This defines a generic interface for mapping. The idea is similar to
33 that of Python's builtin `map` function, which applies a function
34 elementwise to a sequence.
35 """
36
37 def map(func, *seqs):
38 """Do map in parallel.
39
40 Equivalent to map(func, *seqs) or:
41
42 [func(seqs[0][0], seqs[1][0],...), func(seqs[0][1], seqs[1][1],...),...]
43
44 :Parameters:
45 func : FunctionType
46 The function to apply to the sequence
47 sequences : tuple of iterables
48 A sequence of iterables that are used for successive function
49 arguments. This works just like the builtin map.
50 """
51
52 class IMultiEngineMapperFactory(Interface):
53 """
54 An interface for something that creates `IMapper` instances.
55 """
56
57 def mapper(dist='b', targets='all', block=True):
58 """
59 Create an `IMapper` implementer with a given set of arguments.
60
61 The `IMapper` created using a multiengine controller is
62 not load balanced.
63 """
64
65 class ITaskMapperFactory(Interface):
66 """
67 An interface for something that creates `IMapper` instances.
68 """
69
70 def mapper(clear_before=False, clear_after=False, retries=0,
71 recovery_task=None, depend=None, block=True):
72 """
73 Create an `IMapper` implementer with a given set of arguments.
74
75 The `IMapper` created using a task controller is load balanced.
76
77 See `IPython.kernel.task.BaseTask` for documentation on the
78 arguments to this method.
79 """
80
81
82 class MultiEngineMapper(object):
83 """
84 A Mapper for `IMultiEngine` implementers.
85 """
86
87 implements(IMapper)
88
89 def __init__(self, multiengine, dist='b', targets='all', block=True):
90 """
91 Create a Mapper for a multiengine.
92
93 The values of all arguments are used for all calls to `map`. This
94 class allows these arguments to be set once for a series of map calls.
95
96 :Parameters:
97 multiengine : `IMultiEngine` implementer
98 The multiengine to use for running the map commands
99 dist : str
100 The type of decomposition to use. Only block ('b') is
101 supported currently
102 targets : (str, int, tuple of ints)
103 The engines to use in the map
104 block : boolean
105 Whether to block when the map is applied
106 """
107 self.multiengine = multiengine
108 self.dist = dist
109 self.targets = targets
110 self.block = block
111
112 def map(self, func, *sequences):
113 """
114 Apply func to *sequences elementwise. Like Python's builtin map.
115
116 This version is not load balanced.
117 """
118 max_len = max(len(s) for s in sequences)
119 for s in sequences:
120 if len(s)!=max_len:
121 raise ValueError('all sequences must have equal length')
122 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
123 return self.multiengine.raw_map(func, sequences, dist=self.dist,
124 targets=self.targets, block=self.block)
125
126 class TaskMapper(object):
127 """
128 Make an `ITaskController` look like an `IMapper`.
129
130 This class provides a load balanced version of `map`.
131 """
132
133 def __init__(self, task_controller, clear_before=False, clear_after=False, retries=0,
134 recovery_task=None, depend=None, block=True):
135 """
136 Create an `IMapper` given a `TaskController` and arguments.
137
138 The additional arguments are those that are common to all types of
139 tasks and are described in the documentation for
140 `IPython.kernel.task.BaseTask`.
141
142 :Parameters:
143 task_controller : an `ITaskController` implementer
144 The task controller to use for calls to `map`
145 """
146 self.task_controller = task_controller
147 self.clear_before = clear_before
148 self.clear_after = clear_after
149 self.retries = retries
150 self.recovery_task = recovery_task
151 self.depend = depend
152 self.block = block
153
154 def map(self, func, *sequences):
155 """
156 Apply func to *sequences elementwise. Like Python's builtin map.
157
158 This version is load balanced.
159 """
160 max_len = max(len(s) for s in sequences)
161 for s in sequences:
162 if len(s)!=max_len:
163 raise ValueError('all sequences must have equal length')
164 task_args = zip(*sequences)
165 task_ids = []
166 dlist = []
167 for ta in task_args:
168 task = MapTask(func, ta, clear_before=self.clear_before,
169 clear_after=self.clear_after, retries=self.retries,
170 recovery_task=self.recovery_task, depend=self.depend)
171 dlist.append(self.task_controller.run(task))
172 dlist = gatherBoth(dlist, consumeErrors=1)
173 dlist.addCallback(collect_exceptions,'map')
174 if self.block:
175 def get_results(task_ids):
176 d = self.task_controller.barrier(task_ids)
177 d.addCallback(lambda _: gatherBoth([self.task_controller.get_task_result(tid) for tid in task_ids], consumeErrors=1))
178 d.addCallback(collect_exceptions, 'map')
179 return d
180 dlist.addCallback(get_results)
181 return dlist
182
183 class SynchronousTaskMapper(object):
184 """
185 Make an `IBlockingTaskClient` look like an `IMapper`.
186
187 This class provides a load balanced version of `map`.
188 """
189
190 def __init__(self, task_controller, clear_before=False, clear_after=False, retries=0,
191 recovery_task=None, depend=None, block=True):
192 """
193 Create an `IMapper` given an `IBlockingTaskClient` and arguments.
194
195 The additional arguments are those that are common to all types of
196 tasks and are described in the documentation for
197 `IPython.kernel.task.BaseTask`.
198
199 :Parameters:
200 task_controller : an `IBlockingTaskClient` implementer
201 The `TaskController` to use for calls to `map`
202 """
203 self.task_controller = task_controller
204 self.clear_before = clear_before
205 self.clear_after = clear_after
206 self.retries = retries
207 self.recovery_task = recovery_task
208 self.depend = depend
209 self.block = block
210
211 def map(self, func, *sequences):
212 """
213 Apply func to *sequences elementwise. Like Python's builtin map.
214
215 This version is load balanced.
216 """
217 max_len = max(len(s) for s in sequences)
218 for s in sequences:
219 if len(s)!=max_len:
220 raise ValueError('all sequences must have equal length')
221 task_args = zip(*sequences)
222 task_ids = []
223 for ta in task_args:
224 task = MapTask(func, ta, clear_before=self.clear_before,
225 clear_after=self.clear_after, retries=self.retries,
226 recovery_task=self.recovery_task, depend=self.depend)
227 task_ids.append(self.task_controller.run(task))
228 if self.block:
229 self.task_controller.barrier(task_ids)
230 task_results = [self.task_controller.get_task_result(tid) for tid in task_ids]
231 return task_results
232 else:
233 return task_ids No newline at end of file
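
For reference, the `IMapper.map` docstring above promises the same elementwise semantics as Python's builtin `map`. A minimal, pure-Python sketch of that equivalence follows; the helper names are illustrative only and no controller or IPython.kernel import is involved.

    # The elementwise behaviour described in IMapper.map, written out with the
    # builtin map.  Purely illustrative; nothing here touches IPython.kernel.
    def add(a, b):
        return a + b

    xs = [1, 2, 3]
    ys = [10, 20, 30]

    builtin = list(map(add, xs, ys))
    spelled_out = [add(xs[i], ys[i]) for i in range(len(xs))]

    assert builtin == spelled_out == [11, 22, 33]
    print(builtin)
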
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -0,0 +1,18 b''
1 from IPython.kernel import client
2
3 mec = client.MultiEngineClient()
4
5 result = mec.map(lambda x: 2*x, range(10))
6 print "Simple, default map: ", result
7
8 m = mec.mapper(block=False)
9 pr = m.map(lambda x: 2*x, range(10))
10 print "Submitted map, got PendingResult: ", pr
11 result = pr.r
12 print "Using a mapper: ", result
13
14 @mec.parallel()
15 def f(x): return 2*x
16
17 result = f(range(10))
18 print "Using a parallel function: ", result No newline at end of file
@@ -0,0 +1,19 b''
1 from IPython.kernel import client
2
3 tc = client.TaskClient()
4
5 result = tc.map(lambda x: 2*x, range(10))
6 print "Simple, default map: ", result
7
8 m = tc.mapper(block=False, clear_after=True, clear_before=True)
9 tids = m.map(lambda x: 2*x, range(10))
10 print "Submitted tasks, got ids: ", tids
11 tc.barrier(tids)
12 result = [tc.get_task_result(tid) for tid in tids]
13 print "Using a mapper: ", result
14
15 @tc.parallel()
16 def f(x): return 2*x
17
18 result = f(range(10))
19 print "Using a parallel function: ", result No newline at end of file
@@ -1,151 +1,151 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """This file contains unittests for the frontendbase module."""
3 """This file contains unittests for the frontendbase module."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #---------------------------------------------------------------------------
7 #---------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #---------------------------------------------------------------------------
12 #---------------------------------------------------------------------------
13
13
14 #---------------------------------------------------------------------------
14 #---------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #---------------------------------------------------------------------------
16 #---------------------------------------------------------------------------
17
17
18 import unittest
18 import unittest
19 from IPython.frontend import frontendbase
19 from IPython.frontend import frontendbase
20 from IPython.kernel.engineservice import EngineService
20 from IPython.kernel.engineservice import EngineService
21
21
22 class FrontEndCallbackChecker(frontendbase.AsyncFrontEndBase):
22 class FrontEndCallbackChecker(frontendbase.AsyncFrontEndBase):
23 """FrontEndBase subclass for checking callbacks"""
23 """FrontEndBase subclass for checking callbacks"""
24 def __init__(self, engine=None, history=None):
24 def __init__(self, engine=None, history=None):
25 super(FrontEndCallbackChecker, self).__init__(engine=engine,
25 super(FrontEndCallbackChecker, self).__init__(engine=engine,
26 history=history)
26 history=history)
27 self.updateCalled = False
27 self.updateCalled = False
28 self.renderResultCalled = False
28 self.renderResultCalled = False
29 self.renderErrorCalled = False
29 self.renderErrorCalled = False
30
30
31 def update_cell_prompt(self, result, blockID=None):
31 def update_cell_prompt(self, result, blockID=None):
32 self.updateCalled = True
32 self.updateCalled = True
33 return result
33 return result
34
34
35 def render_result(self, result):
35 def render_result(self, result):
36 self.renderResultCalled = True
36 self.renderResultCalled = True
37 return result
37 return result
38
38
39
39
40 def render_error(self, failure):
40 def render_error(self, failure):
41 self.renderErrorCalled = True
41 self.renderErrorCalled = True
42 return failure
42 return failure
43
43
44
44
45
45
46
46
47 class TestAsyncFrontendBase(unittest.TestCase):
47 class TestAsyncFrontendBase(unittest.TestCase):
48 def setUp(self):
48 def setUp(self):
49 """Setup the EngineService and FrontEndBase"""
49 """Setup the EngineService and FrontEndBase"""
50
50
51 self.fb = FrontEndCallbackChecker(engine=EngineService())
51 self.fb = FrontEndCallbackChecker(engine=EngineService())
52
52
53
53
54 def test_implements_IFrontEnd(self):
54 def test_implements_IFrontEnd(self):
55 assert(frontendbase.IFrontEnd.implementedBy(
55 assert(frontendbase.IFrontEnd.implementedBy(
56 frontendbase.AsyncFrontEndBase))
56 frontendbase.AsyncFrontEndBase))
57
57
58
58
59 def test_is_complete_returns_False_for_incomplete_block(self):
59 def test_is_complete_returns_False_for_incomplete_block(self):
60 """"""
60 """"""
61
61
62 block = """def test(a):"""
62 block = """def test(a):"""
63
63
64 assert(self.fb.is_complete(block) == False)
64 assert(self.fb.is_complete(block) == False)
65
65
66 def test_is_complete_returns_True_for_complete_block(self):
66 def test_is_complete_returns_True_for_complete_block(self):
67 """"""
67 """"""
68
68
69 block = """def test(a): pass"""
69 block = """def test(a): pass"""
70
70
71 assert(self.fb.is_complete(block))
71 assert(self.fb.is_complete(block))
72
72
73 block = """a=3"""
73 block = """a=3"""
74
74
75 assert(self.fb.is_complete(block))
75 assert(self.fb.is_complete(block))
76
76
77
77
78 def test_blockID_added_to_result(self):
78 def test_blockID_added_to_result(self):
79 block = """3+3"""
79 block = """3+3"""
80
80
81 d = self.fb.execute(block, blockID='TEST_ID')
81 d = self.fb.execute(block, blockID='TEST_ID')
82
82
83 d.addCallback(self.checkBlockID, expected='TEST_ID')
83 d.addCallback(self.checkBlockID, expected='TEST_ID')
84
84
85 def test_blockID_added_to_failure(self):
85 def test_blockID_added_to_failure(self):
86 block = "raise Exception()"
86 block = "raise Exception()"
87
87
88 d = self.fb.execute(block,blockID='TEST_ID')
88 d = self.fb.execute(block,blockID='TEST_ID')
89 d.addErrback(self.checkFailureID, expected='TEST_ID')
89 d.addErrback(self.checkFailureID, expected='TEST_ID')
90
90
91 def checkBlockID(self, result, expected=""):
91 def checkBlockID(self, result, expected=""):
92 assert(result['blockID'] == expected)
92 assert(result['blockID'] == expected)
93
93
94
94
95 def checkFailureID(self, failure, expected=""):
95 def checkFailureID(self, failure, expected=""):
96 assert(failure.blockID == expected)
96 assert(failure.blockID == expected)
97
97
98
98
99 def test_callbacks_added_to_execute(self):
99 def test_callbacks_added_to_execute(self):
100 """test that
100 """test that
101 update_cell_prompt
101 update_cell_prompt
102 render_result
102 render_result
103
103
104 are added to execute request
104 are added to execute request
105 """
105 """
106
106
107 d = self.fb.execute("10+10")
107 d = self.fb.execute("10+10")
108 d.addCallback(self.checkCallbacks)
108 d.addCallback(self.checkCallbacks)
109
109
110
110
111 def checkCallbacks(self, result):
111 def checkCallbacks(self, result):
112 assert(self.fb.updateCalled)
112 assert(self.fb.updateCalled)
113 assert(self.fb.renderResultCalled)
113 assert(self.fb.renderResultCalled)
114
114
115
115
116 def test_error_callback_added_to_execute(self):
116 def test_error_callback_added_to_execute(self):
117 """test that render_error called on execution error"""
117 """test that render_error called on execution error"""
118
118
119 d = self.fb.execute("raise Exception()")
119 d = self.fb.execute("raise Exception()")
120 d.addCallback(self.checkRenderError)
120 d.addCallback(self.checkRenderError)
121
121
122 def checkRenderError(self, result):
122 def checkRenderError(self, result):
123 assert(self.fb.renderErrorCalled)
123 assert(self.fb.renderErrorCalled)
124
124
125 def test_history_returns_expected_block(self):
125 def test_history_returns_expected_block(self):
126 """Make sure history browsing doesn't fail"""
126 """Make sure history browsing doesn't fail"""
127
127
128 blocks = ["a=1","a=2","a=3"]
128 blocks = ["a=1","a=2","a=3"]
129 for b in blocks:
129 for b in blocks:
130 d = self.fb.execute(b)
130 d = self.fb.execute(b)
131
131
132 # d is now the deferred for the last executed block
132 # d is now the deferred for the last executed block
133 d.addCallback(self.historyTests, blocks)
133 d.addCallback(self.historyTests, blocks)
134
134
135
135
136 def historyTests(self, result, blocks):
136 def historyTests(self, result, blocks):
137 """historyTests"""
137 """historyTests"""
138
138
139 assert(len(blocks) >= 3)
139 assert(len(blocks) >= 3)
140 assert(self.fb.get_history_previous("") == blocks[-2])
140 assert(self.fb.get_history_previous("") == blocks[-2])
141 assert(self.fb.get_history_previous("") == blocks[-3])
141 assert(self.fb.get_history_previous("") == blocks[-3])
142 assert(self.fb.get_history_next() == blocks[-2])
142 assert(self.fb.get_history_next() == blocks[-2])
143
143
144
144
145 def test_history_returns_none_at_startup(self):
145 def test_history_returns_none_at_startup(self):
146 """test_history_returns_none_at_startup"""
146 """test_history_returns_none_at_startup"""
147
147
148 assert(self.fb.get_history_previous("")==None)
148 assert(self.fb.get_history_previous("")==None)
149 assert(self.fb.get_history_next()==None)
149 assert(self.fb.get_history_next()==None)
150
150
151
151
@@ -1,41 +1,41 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Asynchronous clients for the IPython controller.
3 """Asynchronous clients for the IPython controller.
4
4
5 This module has clients for using the various interfaces of the controller
5 This module has clients for using the various interfaces of the controller
6 in a fully asynchronous manner. This means that you will need to run the
6 in a fully asynchronous manner. This means that you will need to run the
7 Twisted reactor yourself and that all methods of the client classes return
7 Twisted reactor yourself and that all methods of the client classes return
8 deferreds to the result.
8 deferreds to the result.
9
9
10 The main methods are `get_*_client` and `get_client`.
10 The main methods are `get_*_client` and `get_client`.
11 """
11 """
12
12
13 __docformat__ = "restructuredtext en"
13 __docformat__ = "restructuredtext en"
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Copyright (C) 2008 The IPython Development Team
16 # Copyright (C) 2008 The IPython Development Team
17 #
17 #
18 # Distributed under the terms of the BSD License. The full license is in
18 # Distributed under the terms of the BSD License. The full license is in
19 # the file COPYING, distributed as part of this software.
19 # the file COPYING, distributed as part of this software.
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 #-------------------------------------------------------------------------------
22 #-------------------------------------------------------------------------------
23 # Imports
23 # Imports
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 from IPython.kernel import codeutil
26 from IPython.kernel import codeutil
27 from IPython.kernel.clientconnector import ClientConnector
27 from IPython.kernel.clientconnector import ClientConnector
28
28
29 # Other things that the user will need
29 # Other things that the user will need
30 from IPython.kernel.task import Task
30 from IPython.kernel.task import MapTask, StringTask
31 from IPython.kernel.error import CompositeError
31 from IPython.kernel.error import CompositeError
32
32
33 #-------------------------------------------------------------------------------
33 #-------------------------------------------------------------------------------
34 # Code
34 # Code
35 #-------------------------------------------------------------------------------
35 #-------------------------------------------------------------------------------
36
36
37 _client_tub = ClientConnector()
37 _client_tub = ClientConnector()
38 get_multiengine_client = _client_tub.get_multiengine_client
38 get_multiengine_client = _client_tub.get_multiengine_client
39 get_task_client = _client_tub.get_task_client
39 get_task_client = _client_tub.get_task_client
40 get_client = _client_tub.get_client
40 get_client = _client_tub.get_client
41
41
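
Since every client method in this asynchronous module returns a Twisted deferred, results are consumed through callbacks rather than return values. A minimal sketch of that pattern, using a pre-fired deferred as a stand-in for a real controller call (the engine-id list shown is a hypothetical value, not real output):

    # Sketch of consuming a deferred result; defer.succeed() stands in for a
    # real call such as get_multiengine_client(), which needs a running
    # reactor and controller.
    from twisted.internet import defer

    def on_ids(ids):
        print("registered engine ids: %s" % ids)

    d = defer.succeed([0, 1, 2, 3])   # hypothetical engine ids
    d.addCallback(on_ids)
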
@@ -1,96 +1,96 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """This module contains blocking clients for the controller interfaces.
3 """This module contains blocking clients for the controller interfaces.
4
4
5 Unlike the clients in `asyncclient.py`, the clients in this module are fully
5 Unlike the clients in `asyncclient.py`, the clients in this module are fully
6 blocking. This means that methods on the clients return the actual results
6 blocking. This means that methods on the clients return the actual results
7 rather than a deferred to the result. Also, we manage the Twisted reactor
7 rather than a deferred to the result. Also, we manage the Twisted reactor
8 for you. This is done by running the reactor in a thread.
8 for you. This is done by running the reactor in a thread.
9
9
10 The main classes in this module are:
10 The main classes in this module are:
11
11
12 * MultiEngineClient
12 * MultiEngineClient
13 * TaskClient
13 * TaskClient
14 * Task
14 * Task
15 * CompositeError
15 * CompositeError
16 """
16 """
17
17
18 __docformat__ = "restructuredtext en"
18 __docformat__ = "restructuredtext en"
19
19
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21 # Copyright (C) 2008 The IPython Development Team
21 # Copyright (C) 2008 The IPython Development Team
22 #
22 #
23 # Distributed under the terms of the BSD License. The full license is in
23 # Distributed under the terms of the BSD License. The full license is in
24 # the file COPYING, distributed as part of this software.
24 # the file COPYING, distributed as part of this software.
25 #-------------------------------------------------------------------------------
25 #-------------------------------------------------------------------------------
26
26
27 #-------------------------------------------------------------------------------
27 #-------------------------------------------------------------------------------
28 # Imports
28 # Imports
29 #-------------------------------------------------------------------------------
29 #-------------------------------------------------------------------------------
30
30
31 import sys
31 import sys
32
32
33 # from IPython.tools import growl
33 # from IPython.tools import growl
34 # growl.start("IPython1 Client")
34 # growl.start("IPython1 Client")
35
35
36
36
37 from twisted.internet import reactor
37 from twisted.internet import reactor
38 from IPython.kernel.clientconnector import ClientConnector
38 from IPython.kernel.clientconnector import ClientConnector
39 from IPython.kernel.twistedutil import ReactorInThread
39 from IPython.kernel.twistedutil import ReactorInThread
40 from IPython.kernel.twistedutil import blockingCallFromThread
40 from IPython.kernel.twistedutil import blockingCallFromThread
41
41
42 # These enable various things
42 # These enable various things
43 from IPython.kernel import codeutil
43 from IPython.kernel import codeutil
44 import IPython.kernel.magic
44 import IPython.kernel.magic
45
45
46 # Other things that the user will need
46 # Other things that the user will need
47 from IPython.kernel.task import Task
47 from IPython.kernel.task import MapTask, StringTask
48 from IPython.kernel.error import CompositeError
48 from IPython.kernel.error import CompositeError
49
49
50 #-------------------------------------------------------------------------------
50 #-------------------------------------------------------------------------------
51 # Code
51 # Code
52 #-------------------------------------------------------------------------------
52 #-------------------------------------------------------------------------------
53
53
54 _client_tub = ClientConnector()
54 _client_tub = ClientConnector()
55
55
56
56
57 def get_multiengine_client(furl_or_file=''):
57 def get_multiengine_client(furl_or_file=''):
58 """Get the blocking MultiEngine client.
58 """Get the blocking MultiEngine client.
59
59
60 :Parameters:
60 :Parameters:
61 furl_or_file : str
61 furl_or_file : str
62 A furl or a filename containing a furl. If empty, the
62 A furl or a filename containing a furl. If empty, the
63 default furl_file will be used
63 default furl_file will be used
64
64
65 :Returns:
65 :Returns:
66 The connected MultiEngineClient instance
66 The connected MultiEngineClient instance
67 """
67 """
68 client = blockingCallFromThread(_client_tub.get_multiengine_client,
68 client = blockingCallFromThread(_client_tub.get_multiengine_client,
69 furl_or_file)
69 furl_or_file)
70 return client.adapt_to_blocking_client()
70 return client.adapt_to_blocking_client()
71
71
72 def get_task_client(furl_or_file=''):
72 def get_task_client(furl_or_file=''):
73 """Get the blocking Task client.
73 """Get the blocking Task client.
74
74
75 :Parameters:
75 :Parameters:
76 furl_or_file : str
76 furl_or_file : str
77 A furl or a filename containing a furl. If empty, the
77 A furl or a filename containing a furl. If empty, the
78 default furl_file will be used
78 default furl_file will be used
79
79
80 :Returns:
80 :Returns:
81 The connected TaskClient instance
81 The connected TaskClient instance
82 """
82 """
83 client = blockingCallFromThread(_client_tub.get_task_client,
83 client = blockingCallFromThread(_client_tub.get_task_client,
84 furl_or_file)
84 furl_or_file)
85 return client.adapt_to_blocking_client()
85 return client.adapt_to_blocking_client()
86
86
87
87
88 MultiEngineClient = get_multiengine_client
88 MultiEngineClient = get_multiengine_client
89 TaskClient = get_task_client
89 TaskClient = get_task_client
90
90
91
91
92
92
93 # Now we start the reactor in a thread
93 # Now we start the reactor in a thread
94 rit = ReactorInThread()
94 rit = ReactorInThread()
95 rit.setDaemon(True)
95 rit.setDaemon(True)
96 rit.start() No newline at end of file
96 rit.start()
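
For comparison with the asynchronous module above, a minimal sketch of how these blocking clients are typically used. It assumes a controller is already running; the commented-out furl path is only a hypothetical example of the `furl_or_file` argument.

    # Sketch only: connect the blocking clients and run a trivial parallel map.
    from IPython.kernel import client

    mec = client.MultiEngineClient()        # uses the default furl file
    # tc = client.TaskClient('/path/to/ipcontroller-tc.furl')  # hypothetical explicit furl

    print(mec.get_ids())                      # engine ids registered with the controller
    print(mec.map(lambda x: 2*x, range(10)))  # blocking call, returns the actual results
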
@@ -1,186 +1,143 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_contexts -*-
2 # -*- test-case-name: IPython.kernel.test.test_contexts -*-
3 """Context managers for IPython.
3 """Context managers for IPython.
4
4
5 Python 2.5 introduced the `with` statement, which is based on the context
5 Python 2.5 introduced the `with` statement, which is based on the context
6 manager protocol. This module offers a few context managers for common cases,
6 manager protocol. This module offers a few context managers for common cases,
7 which can also be useful as templates for writing new, application-specific
7 which can also be useful as templates for writing new, application-specific
8 managers.
8 managers.
9 """
9 """
10
10
11 from __future__ import with_statement
11 from __future__ import with_statement
12
12
13 __docformat__ = "restructuredtext en"
13 __docformat__ = "restructuredtext en"
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Copyright (C) 2008 The IPython Development Team
16 # Copyright (C) 2008 The IPython Development Team
17 #
17 #
18 # Distributed under the terms of the BSD License. The full license is in
18 # Distributed under the terms of the BSD License. The full license is in
19 # the file COPYING, distributed as part of this software.
19 # the file COPYING, distributed as part of this software.
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 #-------------------------------------------------------------------------------
22 #-------------------------------------------------------------------------------
23 # Imports
23 # Imports
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 import linecache
26 import linecache
27 import sys
27 import sys
28
28
29 from twisted.internet.error import ConnectionRefusedError
29 from twisted.internet.error import ConnectionRefusedError
30
30
31 from IPython.ultraTB import _fixed_getinnerframes, findsource
31 from IPython.ultraTB import _fixed_getinnerframes, findsource
32 from IPython import ipapi
32 from IPython import ipapi
33
33
34 from IPython.kernel import error
34 from IPython.kernel import error
35
35
36 #---------------------------------------------------------------------------
36 #---------------------------------------------------------------------------
37 # Utility functions needed by all context managers.
37 # Utility functions needed by all context managers.
38 #---------------------------------------------------------------------------
38 #---------------------------------------------------------------------------
39
39
40 def remote():
40 def remote():
41 """Raises a special exception meant to be caught by context managers.
41 """Raises a special exception meant to be caught by context managers.
42 """
42 """
43 m = 'Special exception to stop local execution of parallel code.'
43 m = 'Special exception to stop local execution of parallel code.'
44 raise error.StopLocalExecution(m)
44 raise error.StopLocalExecution(m)
45
45
46
46
47 def strip_whitespace(source,require_remote=True):
47 def strip_whitespace(source,require_remote=True):
48 """strip leading whitespace from input source.
48 """strip leading whitespace from input source.
49
49
50 :Parameters:
50 :Parameters:
51
51
52 """
52 """
53 remote_mark = 'remote()'
53 remote_mark = 'remote()'
54 # Expand tabs to avoid any confusion.
54 # Expand tabs to avoid any confusion.
55 wsource = [l.expandtabs(4) for l in source]
55 wsource = [l.expandtabs(4) for l in source]
56 # Detect the indentation level
56 # Detect the indentation level
57 done = False
57 done = False
58 for line in wsource:
58 for line in wsource:
59 if line.isspace():
59 if line.isspace():
60 continue
60 continue
61 for col,char in enumerate(line):
61 for col,char in enumerate(line):
62 if char != ' ':
62 if char != ' ':
63 done = True
63 done = True
64 break
64 break
65 if done:
65 if done:
66 break
66 break
67 # Now we know how much leading space there is in the code. Next, we
67 # Now we know how much leading space there is in the code. Next, we
68 # extract up to the first line that has less indentation.
68 # extract up to the first line that has less indentation.
69 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
69 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
70 # detect triple quoted strings that may have flush left text.
70 # detect triple quoted strings that may have flush left text.
71 for lno,line in enumerate(wsource):
71 for lno,line in enumerate(wsource):
72 lead = line[:col]
72 lead = line[:col]
73 if lead.isspace():
73 if lead.isspace():
74 continue
74 continue
75 else:
75 else:
76 if not lead.lstrip().startswith('#'):
76 if not lead.lstrip().startswith('#'):
77 break
77 break
78 # The real 'with' source is up to lno
78 # The real 'with' source is up to lno
79 src_lines = [l[col:] for l in wsource[:lno+1]]
79 src_lines = [l[col:] for l in wsource[:lno+1]]
80
80
81 # Finally, check that the source's first non-comment line begins with the
81 # Finally, check that the source's first non-comment line begins with the
82 # special call 'remote()'
82 # special call 'remote()'
83 if require_remote:
83 if require_remote:
84 for nline,line in enumerate(src_lines):
84 for nline,line in enumerate(src_lines):
85 if line.isspace() or line.startswith('#'):
85 if line.isspace() or line.startswith('#'):
86 continue
86 continue
87 if line.startswith(remote_mark):
87 if line.startswith(remote_mark):
88 break
88 break
89 else:
89 else:
90 raise ValueError('%s call missing at the start of code' %
90 raise ValueError('%s call missing at the start of code' %
91 remote_mark)
91 remote_mark)
92 out_lines = src_lines[nline+1:]
92 out_lines = src_lines[nline+1:]
93 else:
93 else:
94 # If the user specified that the remote() call wasn't mandatory
94 # If the user specified that the remote() call wasn't mandatory
95 out_lines = src_lines
95 out_lines = src_lines
96
96
97 # src = ''.join(out_lines) # dbg
97 # src = ''.join(out_lines) # dbg
98 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
98 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
99 return ''.join(out_lines)
99 return ''.join(out_lines)
100
100
101 class RemoteContextBase(object):
101 class RemoteContextBase(object):
102 def __init__(self):
102 def __init__(self):
103 self.ip = ipapi.get()
103 self.ip = ipapi.get()
104
104
105 def _findsource_file(self,f):
105 def _findsource_file(self,f):
106 linecache.checkcache()
106 linecache.checkcache()
107 s = findsource(f.f_code)
107 s = findsource(f.f_code)
108 lnum = f.f_lineno
108 lnum = f.f_lineno
109 wsource = s[0][f.f_lineno:]
109 wsource = s[0][f.f_lineno:]
110 return strip_whitespace(wsource)
110 return strip_whitespace(wsource)
111
111
112 def _findsource_ipython(self,f):
112 def _findsource_ipython(self,f):
113 from IPython import ipapi
113 from IPython import ipapi
114 self.ip = ipapi.get()
114 self.ip = ipapi.get()
115 buf = self.ip.IP.input_hist_raw[-1].splitlines()[1:]
115 buf = self.ip.IP.input_hist_raw[-1].splitlines()[1:]
116 wsource = [l+'\n' for l in buf ]
116 wsource = [l+'\n' for l in buf ]
117
117
118 return strip_whitespace(wsource)
118 return strip_whitespace(wsource)
119
119
120 def findsource(self,frame):
120 def findsource(self,frame):
121 local_ns = frame.f_locals
121 local_ns = frame.f_locals
122 global_ns = frame.f_globals
122 global_ns = frame.f_globals
123 if frame.f_code.co_filename == '<ipython console>':
123 if frame.f_code.co_filename == '<ipython console>':
124 src = self._findsource_ipython(frame)
124 src = self._findsource_ipython(frame)
125 else:
125 else:
126 src = self._findsource_file(frame)
126 src = self._findsource_file(frame)
127 return src
127 return src
128
128
129 def __enter__(self):
129 def __enter__(self):
130 raise NotImplementedError
130 raise NotImplementedError
131
131
132 def __exit__ (self, etype, value, tb):
132 def __exit__ (self, etype, value, tb):
133 if issubclass(etype,error.StopLocalExecution):
133 if issubclass(etype,error.StopLocalExecution):
134 return True
134 return True
135
135
136 class RemoteMultiEngine(RemoteContextBase):
136 class RemoteMultiEngine(RemoteContextBase):
137 def __init__(self,mec):
137 def __init__(self,mec):
138 self.mec = mec
138 self.mec = mec
139 RemoteContextBase.__init__(self)
139 RemoteContextBase.__init__(self)
140
140
141 def __enter__(self):
141 def __enter__(self):
142 src = self.findsource(sys._getframe(1))
142 src = self.findsource(sys._getframe(1))
143 return self.mec.execute(src)
143 return self.mec.execute(src)
144
145
146 # XXX - Temporary hackish testing, we'll move this into proper tests right
147 # away
148
149 if __name__ == '__main__':
150
151 # XXX - for now, we need a running cluster to be started separately. The
152 # daemon work is almost finished, and will make much of this unnecessary.
153 from IPython.kernel import client
154 mec = client.MultiEngineClient(('127.0.0.1',10105))
155
156 try:
157 mec.get_ids()
158 except ConnectionRefusedError:
159 import os, time
160 os.system('ipcluster -n 2 &')
161 time.sleep(2)
162 mec = client.MultiEngineClient(('127.0.0.1',10105))
163
164 mec.block = False
165
166 import itertools
167 c = itertools.count()
168
169 parallel = RemoteMultiEngine(mec)
170
171 mec.pushAll()
172
173 with parallel as pr:
174 # A comment
175 remote() # this means the code below only runs remotely
176 print 'Hello remote world'
177 x = range(10)
178 # Comments are OK
179 # Even misindented.
180 y = x+1
181
182
183 with pfor('i',sequence) as pr:
184 print x[i]
185
186 print pr.x + pr.y
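
The `__exit__` method of `RemoteContextBase` works by returning True when it sees `StopLocalExecution`, which tells Python to swallow the exception raised by `remote()` and skip the rest of the block locally. A self-contained sketch of that mechanism, using stand-in class names:

    # Stand-in for IPython.kernel.error.StopLocalExecution; illustration only.
    class StopLocal(Exception):
        pass

    class SkipLocally(object):
        def __enter__(self):
            return self
        def __exit__(self, etype, value, tb):
            # Returning True marks the exception as handled, so execution
            # resumes after the with block instead of propagating.
            return etype is not None and issubclass(etype, StopLocal)

    with SkipLocally():
        raise StopLocal("stop local execution")
        print("never reached locally")
    print("continues after the with block")
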
@@ -1,171 +1,171 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Magic command interface for interactive parallel work."""
3 """Magic command interface for interactive parallel work."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import new
18 import new
19
19
20 from IPython.iplib import InteractiveShell
20 from IPython.iplib import InteractiveShell
21 from IPython.Shell import MTInteractiveShell
21 from IPython.Shell import MTInteractiveShell
22
22
23 from twisted.internet.defer import Deferred
23 from twisted.internet.defer import Deferred
24
24
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Definitions of magic functions for use with IPython
27 # Definitions of magic functions for use with IPython
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 NO_ACTIVE_CONTROLLER = """
30 NO_ACTIVE_CONTROLLER = """
31 Error: No Controller is activated
31 Error: No Controller is activated
32 Use activate() on a RemoteController object to activate it for magics.
32 Use activate() on a RemoteController object to activate it for magics.
33 """
33 """
34
34
35 def magic_result(self,parameter_s=''):
35 def magic_result(self,parameter_s=''):
36 """Print the result of command i on all engines of the active controller.
36 """Print the result of command i on all engines of the active controller.
37
37
38 To activate a controller in IPython, first create it and then call
38 To activate a controller in IPython, first create it and then call
39 the activate() method.
39 the activate() method.
40
40
41 Then you can do the following:
41 Then you can do the following:
42
42
43 >>> result # Print the latest result
43 >>> result # Print the latest result
44 Printing result...
44 Printing result...
45 [127.0.0.1:0] In [1]: b = 10
45 [127.0.0.1:0] In [1]: b = 10
46 [127.0.0.1:1] In [1]: b = 10
46 [127.0.0.1:1] In [1]: b = 10
47
47
48 >>> result 0 # Print result 0
48 >>> result 0 # Print result 0
49 In [14]: result 0
49 In [14]: result 0
50 Printing result...
50 Printing result...
51 [127.0.0.1:0] In [0]: a = 5
51 [127.0.0.1:0] In [0]: a = 5
52 [127.0.0.1:1] In [0]: a = 5
52 [127.0.0.1:1] In [0]: a = 5
53 """
53 """
54 try:
54 try:
55 activeController = __IPYTHON__.activeController
55 activeController = __IPYTHON__.activeController
56 except AttributeError:
56 except AttributeError:
57 print NO_ACTIVE_CONTROLLER
57 print NO_ACTIVE_CONTROLLER
58 else:
58 else:
59 try:
59 try:
60 index = int(parameter_s)
60 index = int(parameter_s)
61 except:
61 except:
62 index = None
62 index = None
63 result = activeController.get_result(index)
63 result = activeController.get_result(index)
64 return result
64 return result
65
65
66 def magic_px(self,parameter_s=''):
66 def magic_px(self,parameter_s=''):
67 """Executes the given python command on the active IPython Controller.
67 """Executes the given python command on the active IPython Controller.
68
68
69 To activate a Controller in IPython, first create it and then call
69 To activate a Controller in IPython, first create it and then call
70 the activate() method.
70 the activate() method.
71
71
72 Then you can do the following:
72 Then you can do the following:
73
73
74 >>> %px a = 5 # Runs a = 5 on all nodes
74 >>> %px a = 5 # Runs a = 5 on all nodes
75 """
75 """
76
76
77 try:
77 try:
78 activeController = __IPYTHON__.activeController
78 activeController = __IPYTHON__.activeController
79 except AttributeError:
79 except AttributeError:
80 print NO_ACTIVE_CONTROLLER
80 print NO_ACTIVE_CONTROLLER
81 else:
81 else:
82 print "Executing command on Controller"
82 print "Parallel execution on engines: %s" % activeController.targets
83 result = activeController.execute(parameter_s)
83 result = activeController.execute(parameter_s)
84 return result
84 return result
85
85
86 def pxrunsource(self, source, filename="<input>", symbol="single"):
86 def pxrunsource(self, source, filename="<input>", symbol="single"):
87
87
88 try:
88 try:
89 code = self.compile(source, filename, symbol)
89 code = self.compile(source, filename, symbol)
90 except (OverflowError, SyntaxError, ValueError):
90 except (OverflowError, SyntaxError, ValueError):
91 # Case 1
91 # Case 1
92 self.showsyntaxerror(filename)
92 self.showsyntaxerror(filename)
93 return None
93 return None
94
94
95 if code is None:
95 if code is None:
96 # Case 2
96 # Case 2
97 return True
97 return True
98
98
99 # Case 3
99 # Case 3
100 # Because autopx is enabled, we now call executeAll or disable autopx if
100 # Because autopx is enabled, we now call executeAll or disable autopx if
101 # %autopx or autopx has been called
101 # %autopx or autopx has been called
102 if '_ip.magic("%autopx' in source or '_ip.magic("autopx' in source:
102 if '_ip.magic("%autopx' in source or '_ip.magic("autopx' in source:
103 _disable_autopx(self)
103 _disable_autopx(self)
104 return False
104 return False
105 else:
105 else:
106 try:
106 try:
107 result = self.activeController.execute(source)
107 result = self.activeController.execute(source)
108 except:
108 except:
109 self.showtraceback()
109 self.showtraceback()
110 else:
110 else:
111 print result.__repr__()
111 print result.__repr__()
112 return False
112 return False
113
113
114 def magic_autopx(self, parameter_s=''):
114 def magic_autopx(self, parameter_s=''):
115 """Toggles auto parallel mode for the active IPython Controller.
115 """Toggles auto parallel mode for the active IPython Controller.
116
116
117 To activate a Controller in IPython, first create it and then call
117 To activate a Controller in IPython, first create it and then call
118 the activate() method.
118 the activate() method.
119
119
120 Then you can do the following:
120 Then you can do the following:
121
121
122 >>> %autopx # Now all commands are executed in parallel
122 >>> %autopx # Now all commands are executed in parallel
123 Auto Parallel Enabled
123 Auto Parallel Enabled
124 Type %autopx to disable
124 Type %autopx to disable
125 ...
125 ...
126 >>> %autopx # Now all commands are locally executed
126 >>> %autopx # Now all commands are locally executed
127 Auto Parallel Disabled
127 Auto Parallel Disabled
128 """
128 """
129
129
130 if hasattr(self, 'autopx'):
130 if hasattr(self, 'autopx'):
131 if self.autopx == True:
131 if self.autopx == True:
132 _disable_autopx(self)
132 _disable_autopx(self)
133 else:
133 else:
134 _enable_autopx(self)
134 _enable_autopx(self)
135 else:
135 else:
136 _enable_autopx(self)
136 _enable_autopx(self)
137
137
138 def _enable_autopx(self):
138 def _enable_autopx(self):
139 """Enable %autopx mode by saving the original runsource and installing
139 """Enable %autopx mode by saving the original runsource and installing
140 pxrunsource.
140 pxrunsource.
141 """
141 """
142 try:
142 try:
143 activeController = __IPYTHON__.activeController
143 activeController = __IPYTHON__.activeController
144 except AttributeError:
144 except AttributeError:
145 print "No active RemoteController found, use RemoteController.activate()."
145 print "No active RemoteController found, use RemoteController.activate()."
146 else:
146 else:
147 self._original_runsource = self.runsource
147 self._original_runsource = self.runsource
148 self.runsource = new.instancemethod(pxrunsource, self, self.__class__)
148 self.runsource = new.instancemethod(pxrunsource, self, self.__class__)
149 self.autopx = True
149 self.autopx = True
150 print "Auto Parallel Enabled\nType %autopx to disable"
150 print "Auto Parallel Enabled\nType %autopx to disable"
151
151
152 def _disable_autopx(self):
152 def _disable_autopx(self):
153 """Disable %autopx by restoring the original runsource."""
153 """Disable %autopx by restoring the original runsource."""
154 if hasattr(self, 'autopx'):
154 if hasattr(self, 'autopx'):
155 if self.autopx == True:
155 if self.autopx == True:
156 self.runsource = self._original_runsource
156 self.runsource = self._original_runsource
157 self.autopx = False
157 self.autopx = False
158 print "Auto Parallel Disabled"
158 print "Auto Parallel Disabled"
159
159
160 # Add the new magic function to the class dict:
160 # Add the new magic function to the class dict:
161
161
162 InteractiveShell.magic_result = magic_result
162 InteractiveShell.magic_result = magic_result
163 InteractiveShell.magic_px = magic_px
163 InteractiveShell.magic_px = magic_px
164 InteractiveShell.magic_autopx = magic_autopx
164 InteractiveShell.magic_autopx = magic_autopx
165
165
166 # And remove the global name to keep global namespace clean. Don't worry, the
166 # And remove the global name to keep global namespace clean. Don't worry, the
167 # copy bound to IPython stays, we're just removing the global name.
167 # copy bound to IPython stays, we're just removing the global name.
168 del magic_result
168 del magic_result
169 del magic_px
169 del magic_px
170 del magic_autopx
170 del magic_autopx
171
171
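
`_enable_autopx` works by saving the original `runsource` bound method and binding `pxrunsource` onto the shell instance in its place; `_disable_autopx` restores the saved method. A small sketch of that swap using `types.MethodType` (the standard-library equivalent of the `new.instancemethod` call above), with illustrative class and method names:

    import types

    class Shell(object):
        def runsource(self, src):
            return "local: " + src

    def pxrunsource(self, src):
        return "parallel: " + src

    shell = Shell()
    # Enable: remember the original, then bind the parallel version to the instance.
    shell._original_runsource = shell.runsource
    shell.runsource = types.MethodType(pxrunsource, shell)
    print(shell.runsource("a = 5"))    # parallel: a = 5
    # Disable: put the original bound method back.
    shell.runsource = shell._original_runsource
    print(shell.runsource("a = 5"))    # local: a = 5
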
@@ -1,121 +1,121 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Classes used in scattering and gathering sequences.
3 """Classes used in scattering and gathering sequences.
4
4
5 Scattering consists of partitioning a sequence and sending the various
5 Scattering consists of partitioning a sequence and sending the various
6 pieces to individual nodes in a cluster.
6 pieces to individual nodes in a cluster.
7 """
7 """
8
8
9 __docformat__ = "restructuredtext en"
9 __docformat__ = "restructuredtext en"
10
10
11 #-------------------------------------------------------------------------------
11 #-------------------------------------------------------------------------------
12 # Copyright (C) 2008 The IPython Development Team
12 # Copyright (C) 2008 The IPython Development Team
13 #
13 #
14 # Distributed under the terms of the BSD License. The full license is in
14 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
15 # the file COPYING, distributed as part of this software.
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 #-------------------------------------------------------------------------------
18 #-------------------------------------------------------------------------------
19 # Imports
19 # Imports
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 import types
22 import types
23
23
24 from IPython.genutils import flatten as genutil_flatten
24 from IPython.genutils import flatten as genutil_flatten
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Figure out which array packages are present and their array types
27 # Figure out which array packages are present and their array types
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 arrayModules = []
30 arrayModules = []
31 try:
31 try:
32 import Numeric
32 import Numeric
33 except ImportError:
33 except ImportError:
34 pass
34 pass
35 else:
35 else:
36 arrayModules.append({'module':Numeric, 'type':Numeric.arraytype})
36 arrayModules.append({'module':Numeric, 'type':Numeric.arraytype})
37 try:
37 try:
38 import numpy
38 import numpy
39 except ImportError:
39 except ImportError:
40 pass
40 pass
41 else:
41 else:
42 arrayModules.append({'module':numpy, 'type':numpy.ndarray})
42 arrayModules.append({'module':numpy, 'type':numpy.ndarray})
43 try:
43 try:
44 import numarray
44 import numarray
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47 else:
47 else:
48 arrayModules.append({'module':numarray,
48 arrayModules.append({'module':numarray,
49 'type':numarray.numarraycore.NumArray})
49 'type':numarray.numarraycore.NumArray})
50
50
51 class Map:
51 class Map:
52 """A class for partitioning a sequence using a map."""
52 """A class for partitioning a sequence using a map."""
53
53
54 def getPartition(self, seq, p, q):
54 def getPartition(self, seq, p, q):
55 """Returns the pth partition of q partitions of seq."""
55 """Returns the pth partition of q partitions of seq."""
56
56
57 # Test for error conditions here
57 # Test for error conditions here
58 if p<0 or p>=q:
58 if p<0 or p>=q:
59 print "No partition exists."
59 print "No partition exists."
60 return
60 return
61
61
62 remainder = len(seq)%q
62 remainder = len(seq)%q
63 basesize = len(seq)/q
63 basesize = len(seq)/q
64 hi = []
64 hi = []
65 lo = []
65 lo = []
66 for n in range(q):
66 for n in range(q):
67 if n < remainder:
67 if n < remainder:
68 lo.append(n * (basesize + 1))
68 lo.append(n * (basesize + 1))
69 hi.append(lo[-1] + basesize + 1)
69 hi.append(lo[-1] + basesize + 1)
70 else:
70 else:
71 lo.append(n*basesize + remainder)
71 lo.append(n*basesize + remainder)
72 hi.append(lo[-1] + basesize)
72 hi.append(lo[-1] + basesize)
73
73
74
74
75 result = seq[lo[p]:hi[p]]
75 result = seq[lo[p]:hi[p]]
76 return result
76 return result
77
77
78 def joinPartitions(self, listOfPartitions):
78 def joinPartitions(self, listOfPartitions):
79 return self.concatenate(listOfPartitions)
79 return self.concatenate(listOfPartitions)
80
80
81 def concatenate(self, listOfPartitions):
81 def concatenate(self, listOfPartitions):
82 testObject = listOfPartitions[0]
82 testObject = listOfPartitions[0]
83 # First see if we have a known array type
83 # First see if we have a known array type
84 for m in arrayModules:
84 for m in arrayModules:
85 #print m
85 #print m
86 if isinstance(testObject, m['type']):
86 if isinstance(testObject, m['type']):
87 return m['module'].concatenate(listOfPartitions)
87 return m['module'].concatenate(listOfPartitions)
88 # Next try for Python sequence types
88 # Next try for Python sequence types
89 if isinstance(testObject, (types.ListType, types.TupleType)):
89 if isinstance(testObject, (types.ListType, types.TupleType)):
90 return genutil_flatten(listOfPartitions)
90 return genutil_flatten(listOfPartitions)
91 # If we have scalars, just return listOfPartitions
91 # If we have scalars, just return listOfPartitions
92 return listOfPartitions
92 return listOfPartitions
93
93
94 class RoundRobinMap(Map):
94 class RoundRobinMap(Map):
95 Partitions a sequence in a round-robin fashion.
95 Partitions a sequence in a round-robin fashion.
96
96
97 This currently does not work!
97 This currently does not work!
98 """
98 """
99
99
100 def getPartition(self, seq, p, q):
100 def getPartition(self, seq, p, q):
101 return seq[p:len(seq):q]
101 return seq[p:len(seq):q]
102 #result = []
102 #result = []
103 #for i in range(p,len(seq),q):
103 #for i in range(p,len(seq),q):
104 # result.append(seq[i])
104 # result.append(seq[i])
105 #return result
105 #return result
106
106
107 def joinPartitions(self, listOfPartitions):
107 def joinPartitions(self, listOfPartitions):
108 #lengths = [len(x) for x in listOfPartitions]
108 #lengths = [len(x) for x in listOfPartitions]
109 #maxPartitionLength = len(listOfPartitions[0])
109 #maxPartitionLength = len(listOfPartitions[0])
110 #numberOfPartitions = len(listOfPartitions)
110 #numberOfPartitions = len(listOfPartitions)
111 #concat = self.concatenate(listOfPartitions)
111 #concat = self.concatenate(listOfPartitions)
112 #totalLength = len(concat)
112 #totalLength = len(concat)
113 #result = []
113 #result = []
114 #for i in range(maxPartitionLength):
114 #for i in range(maxPartitionLength):
115 # result.append(concat[i:totalLength:maxPartitionLength])
115 # result.append(concat[i:totalLength:maxPartitionLength])
116 return self.concatenate(listOfPartitions)
116 return self.concatenate(listOfPartitions)
117
117
118 styles = {'basic':Map}
118 dists = {'b':Map}
119
119
120
120
121
121
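
`Map.getPartition` above splits a sequence into q contiguous blocks and gives the first `len(seq) % q` blocks one extra element. A small worked example of that arithmetic in plain Python; this reimplements the logic for illustration and is not imported from IPython.kernel:

    # Block partitioning as in Map.getPartition: q contiguous pieces, with the
    # remainder spread over the first few pieces.
    def block_partition(seq, p, q):
        remainder = len(seq) % q
        basesize = len(seq) // q
        lo, hi = [], []
        for n in range(q):
            if n < remainder:
                lo.append(n * (basesize + 1))
                hi.append(lo[-1] + basesize + 1)
            else:
                lo.append(n * basesize + remainder)
                hi.append(lo[-1] + basesize)
        return seq[lo[p]:hi[p]]

    seq = list(range(10))
    print([block_partition(seq, p, 3) for p in range(3)])
    # -> [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
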
@@ -1,780 +1,753 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_multiengine -*-
2 # -*- test-case-name: IPython.kernel.test.test_multiengine -*-
3
3
4 """Adapt the IPython ControllerServer to IMultiEngine.
4 """Adapt the IPython ControllerServer to IMultiEngine.
5
5
6 This module provides classes that adapt a ControllerService to the
6 This module provides classes that adapt a ControllerService to the
7 IMultiEngine interface. This interface is a basic interactive interface
7 IMultiEngine interface. This interface is a basic interactive interface
8 for working with a set of engines where it is desired to have explicit
8 for working with a set of engines where it is desired to have explicit
9 access to each registered engine.
9 access to each registered engine.
10
10
11 The classes here are exposed to the network in files like:
11 The classes here are exposed to the network in files like:
12
12
13 * multienginevanilla.py
13 * multienginevanilla.py
14 * multienginepb.py
14 * multienginepb.py
15 """
15 """
16
16
17 __docformat__ = "restructuredtext en"
17 __docformat__ = "restructuredtext en"
18
18
19 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
20 # Copyright (C) 2008 The IPython Development Team
20 # Copyright (C) 2008 The IPython Development Team
21 #
21 #
22 # Distributed under the terms of the BSD License. The full license is in
22 # Distributed under the terms of the BSD License. The full license is in
23 # the file COPYING, distributed as part of this software.
23 # the file COPYING, distributed as part of this software.
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Imports
27 # Imports
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 from new import instancemethod
30 from new import instancemethod
31 from types import FunctionType
31 from types import FunctionType
32
32
33 from twisted.application import service
33 from twisted.application import service
34 from twisted.internet import defer, reactor
34 from twisted.internet import defer, reactor
35 from twisted.python import log, components, failure
35 from twisted.python import log, components, failure
36 from zope.interface import Interface, implements, Attribute
36 from zope.interface import Interface, implements, Attribute
37
37
38 from IPython.tools import growl
38 from IPython.tools import growl
39 from IPython.kernel.util import printer
39 from IPython.kernel.util import printer
40 from IPython.kernel.twistedutil import gatherBoth
40 from IPython.kernel.twistedutil import gatherBoth
41 from IPython.kernel import map as Map
41 from IPython.kernel import map as Map
42 from IPython.kernel import error
42 from IPython.kernel import error
43 from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase
43 from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase
44 from IPython.kernel.controllerservice import \
44 from IPython.kernel.controllerservice import \
45 ControllerAdapterBase, \
45 ControllerAdapterBase, \
46 ControllerService, \
46 ControllerService, \
47 IControllerBase
47 IControllerBase
48
48
49
49
50 #-------------------------------------------------------------------------------
50 #-------------------------------------------------------------------------------
51 # Interfaces for the MultiEngine representation of a controller
51 # Interfaces for the MultiEngine representation of a controller
52 #-------------------------------------------------------------------------------
52 #-------------------------------------------------------------------------------
53
53
54 class IEngineMultiplexer(Interface):
54 class IEngineMultiplexer(Interface):
55 """Interface to multiple engines implementing IEngineCore/Serialized/Queued.
55 """Interface to multiple engines implementing IEngineCore/Serialized/Queued.
56
56
57 This class simply acts as a multiplexer of methods that are in the
57 This class simply acts as a multiplexer of methods that are in the
58 various IEngines* interfaces. Thus the methods here are just like those
58 various IEngines* interfaces. Thus the methods here are just like those
59 in the IEngine* interfaces, but with an extra first argument, targets.
59 in the IEngine* interfaces, but with an extra first argument, targets.
60 The targets argument can have the following forms:
60 The targets argument can have the following forms:
61
61
62 * targets = 10 # Engines are indexed by ints
62 * targets = 10 # Engines are indexed by ints
63 * targets = [0,1,2,3] # A list of ints
63 * targets = [0,1,2,3] # A list of ints
64 * targets = 'all' # A string to indicate all targets
64 * targets = 'all' # A string to indicate all targets
65
65
66 If targets is bad in any way, an InvalidEngineID will be raised. This
66 If targets is bad in any way, an InvalidEngineID will be raised. This
67 includes engines not being registered.
67 includes engines not being registered.
68
68
69 All IEngineMultiplexer methods must return a Deferred to a list
69 All IEngineMultiplexer methods must return a Deferred to a list
70 with length equal to the number of targets. The elements of the list will
70 with length equal to the number of targets. The elements of the list will
71 correspond to the return of the corresponding IEngine method.
71 correspond to the return of the corresponding IEngine method.
72
72
73 Failures are aggressive, meaning that if an action fails for any target,
73 Failures are aggressive, meaning that if an action fails for any target,
74 the overall action will fail immediately with that Failure.
74 the overall action will fail immediately with that Failure.
75
75
76 :Parameters:
76 :Parameters:
77 targets : int, list of ints, or 'all'
77 targets : int, list of ints, or 'all'
78 Engine ids the action will apply to.
78 Engine ids the action will apply to.
79
79
80 :Returns: Deferred to a list of results for each engine.
80 :Returns: Deferred to a list of results for each engine.
81
81
82 :Exception:
82 :Exception:
83 InvalidEngineID
83 InvalidEngineID
84 If the targets argument is bad or engines aren't registered.
84 If the targets argument is bad or engines aren't registered.
85 NoEnginesRegistered
85 NoEnginesRegistered
86 If there are no engines registered and targets='all'
86 If there are no engines registered and targets='all'
87 """
87 """
88
88
89 #---------------------------------------------------------------------------
89 #---------------------------------------------------------------------------
90 # Multiplexed methods
90 # Multiplexed methods
91 #---------------------------------------------------------------------------
91 #---------------------------------------------------------------------------
92
92
93 def execute(lines, targets='all'):
93 def execute(lines, targets='all'):
94 """Execute lines of Python code on targets.
94 """Execute lines of Python code on targets.
95
95
96 See the class docstring for information about targets and possible
96 See the class docstring for information about targets and possible
97 exceptions this method can raise.
97 exceptions this method can raise.
98
98
99 :Parameters:
99 :Parameters:
100 lines : str
100 lines : str
101 String of python code to be executed on targets.
101 String of python code to be executed on targets.
102 """
102 """
103
103
104 def push(namespace, targets='all'):
104 def push(namespace, targets='all'):
105 """Push dict namespace into the user's namespace on targets.
105 """Push dict namespace into the user's namespace on targets.
106
106
107 See the class docstring for information about targets and possible
107 See the class docstring for information about targets and possible
108 exceptions this method can raise.
108 exceptions this method can raise.
109
109
110 :Parameters:
110 :Parameters:
111 namespace : dict
111 namespace : dict
112 Dict of key/value pairs to be put into the user's namespace.
112 Dict of key/value pairs to be put into the user's namespace.
113 """
113 """
114
114
115 def pull(keys, targets='all'):
115 def pull(keys, targets='all'):
116 """Pull values out of the user's namespace on targets by keys.
116 """Pull values out of the user's namespace on targets by keys.
117
117
118 See the class docstring for information about targets and possible
118 See the class docstring for information about targets and possible
119 exceptions this method can raise.
119 exceptions this method can raise.
120
120
121 :Parameters:
121 :Parameters:
122 keys : tuple of strings
122 keys : tuple of strings
123 Sequence of keys to be pulled from user's namespace.
123 Sequence of keys to be pulled from user's namespace.
124 """
124 """
125
125
126 def push_function(namespace, targets='all'):
126 def push_function(namespace, targets='all'):
127 """"""
127 """"""
128
128
129 def pull_function(keys, targets='all'):
129 def pull_function(keys, targets='all'):
130 """"""
130 """"""
131
131
132 def get_result(i=None, targets='all'):
132 def get_result(i=None, targets='all'):
133 """Get the result for command i from targets.
133 """Get the result for command i from targets.
134
134
135 See the class docstring for information about targets and possible
135 See the class docstring for information about targets and possible
136 exceptions this method can raise.
136 exceptions this method can raise.
137
137
138 :Parameters:
138 :Parameters:
139 i : int or None
139 i : int or None
140 Command index or None to indicate most recent command.
140 Command index or None to indicate most recent command.
141 """
141 """
142
142
143 def reset(targets='all'):
143 def reset(targets='all'):
144 """Reset targets.
144 """Reset targets.
145
145
146 This clears the user's namespace on the Engines, but won't cause
146 This clears the user's namespace on the Engines, but won't cause
147 modules to be reloaded.
147 modules to be reloaded.
148 """
148 """
149
149
150 def keys(targets='all'):
150 def keys(targets='all'):
151 """Get variable names defined in user's namespace on targets."""
151 """Get variable names defined in user's namespace on targets."""
152
152
153 def kill(controller=False, targets='all'):
153 def kill(controller=False, targets='all'):
154 """Kill the targets Engines and possibly the controller.
154 """Kill the targets Engines and possibly the controller.
155
155
156 :Parameters:
156 :Parameters:
157 controller : boolean
157 controller : boolean
158 Whether the controller should be killed as well. If so, all the
158 Whether the controller should be killed as well. If so, all the
159 engines will be killed first, no matter what targets is.
159 engines will be killed first, no matter what targets is.
160 """
160 """
161
161
162 def push_serialized(namespace, targets='all'):
162 def push_serialized(namespace, targets='all'):
163 """Push a namespace of Serialized objects to targets.
163 """Push a namespace of Serialized objects to targets.
164
164
165 :Parameters:
165 :Parameters:
166 namespace : dict
166 namespace : dict
167 A dict whose keys are the variable names and whose values
167 A dict whose keys are the variable names and whose values
168 are serialized versions of the objects.
168 are serialized versions of the objects.
169 """
169 """
170
170
171 def pull_serialized(keys, targets='all'):
171 def pull_serialized(keys, targets='all'):
172 """Pull Serialized objects by keys from targets.
172 """Pull Serialized objects by keys from targets.
173
173
174 :Parameters:
174 :Parameters:
175 keys : tuple of strings
175 keys : tuple of strings
176 Sequence of variable names to pull as serialized objects.
176 Sequence of variable names to pull as serialized objects.
177 """
177 """
178
178
179 def clear_queue(targets='all'):
179 def clear_queue(targets='all'):
180 """Clear the queue of pending command for targets."""
180 """Clear the queue of pending command for targets."""
181
181
182 def queue_status(targets='all'):
182 def queue_status(targets='all'):
183 """Get the status of the queue on the targets."""
183 """Get the status of the queue on the targets."""
184
184
185 def set_properties(properties, targets='all'):
185 def set_properties(properties, targets='all'):
186 """set properties by key and value"""
186 """set properties by key and value"""
187
187
188 def get_properties(keys=None, targets='all'):
188 def get_properties(keys=None, targets='all'):
189 """get a list of properties by `keys`, if no keys specified, get all"""
189 """get a list of properties by `keys`, if no keys specified, get all"""
190
190
191 def del_properties(keys, targets='all'):
191 def del_properties(keys, targets='all'):
192 """delete properties by `keys`"""
192 """delete properties by `keys`"""
193
193
194 def has_properties(keys, targets='all'):
194 def has_properties(keys, targets='all'):
195 """get a list of bool values for whether `properties` has `keys`"""
195 """get a list of bool values for whether `properties` has `keys`"""
196
196
197 def clear_properties(targets='all'):
197 def clear_properties(targets='all'):
198 """clear the properties dict"""
198 """clear the properties dict"""
199
199
200
200
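The properties methods behave like a small per-engine dictionary. A hedged sketch, again with an illustrative `multiengine` object (none of these names are part of the interface itself):

    def show(props):
        # One entry per engine, in target order.
        print props

    d = multiengine.set_properties({'phase': 'warmup', 'seed': 42}, targets='all')
    # Returning a deferred from a callback chains it in the usual Twisted way.
    d.addCallback(lambda _: multiengine.get_properties(('phase',), targets='all'))
    d.addCallback(show)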
201 class IMultiEngine(IEngineMultiplexer):
201 class IMultiEngine(IEngineMultiplexer):
202 """A controller that exposes an explicit interface to all of its engines.
202 """A controller that exposes an explicit interface to all of its engines.
203
203
204 This is the primary interface for interactive usage.
204 This is the primary interface for interactive usage.
205 """
205 """
206
206
207 def get_ids():
207 def get_ids():
208 """Return list of currently registered ids.
208 """Return list of currently registered ids.
209
209
210 :Returns: A Deferred to a list of registered engine ids.
210 :Returns: A Deferred to a list of registered engine ids.
211 """
211 """
212
212
213
213
214
214
215 #-------------------------------------------------------------------------------
215 #-------------------------------------------------------------------------------
216 # Implementation of the core MultiEngine classes
216 # Implementation of the core MultiEngine classes
217 #-------------------------------------------------------------------------------
217 #-------------------------------------------------------------------------------
218
218
219 class MultiEngine(ControllerAdapterBase):
219 class MultiEngine(ControllerAdapterBase):
220 """The representation of a ControllerService as a IMultiEngine.
220 """The representation of a ControllerService as a IMultiEngine.
221
221
222 Although it is not implemented currently, this class would be where a
222 Although it is not implemented currently, this class would be where a
223 client/notification API is implemented. It could inherit from something
223 client/notification API is implemented. It could inherit from something
224 like results.NotifierParent and then use the notify method to send
224 like results.NotifierParent and then use the notify method to send
225 notifications.
225 notifications.
226 """
226 """
227
227
228 implements(IMultiEngine)
228 implements(IMultiEngine)
229
229
230 def __init__(self, controller):
230 def __init__(self, controller):
231 ControllerAdapterBase.__init__(self, controller)
231 ControllerAdapterBase.__init__(self, controller)
232
232
233 #---------------------------------------------------------------------------
233 #---------------------------------------------------------------------------
234 # Helper methods
234 # Helper methods
235 #---------------------------------------------------------------------------
235 #---------------------------------------------------------------------------
236
236
237 def engineList(self, targets):
237 def engineList(self, targets):
238 """Parse the targets argument into a list of valid engine objects.
238 """Parse the targets argument into a list of valid engine objects.
239
239
240 :Parameters:
240 :Parameters:
241 targets : int, list of ints or 'all'
241 targets : int, list of ints or 'all'
242 The targets argument to be parsed.
242 The targets argument to be parsed.
243
243
244 :Returns: List of engine objects.
244 :Returns: List of engine objects.
245
245
246 :Exception:
246 :Exception:
247 InvalidEngineID
247 InvalidEngineID
248 If targets is not valid or if an engine is not registered.
248 If targets is not valid or if an engine is not registered.
249 """
249 """
250 if isinstance(targets, int):
250 if isinstance(targets, int):
251 if targets not in self.engines.keys():
251 if targets not in self.engines.keys():
252 log.msg("Engine with id %i is not registered" % targets)
252 log.msg("Engine with id %i is not registered" % targets)
253 raise error.InvalidEngineID("Engine with id %i is not registered" % targets)
253 raise error.InvalidEngineID("Engine with id %i is not registered" % targets)
254 else:
254 else:
255 return [self.engines[targets]]
255 return [self.engines[targets]]
256 elif isinstance(targets, (list, tuple)):
256 elif isinstance(targets, (list, tuple)):
257 for id in targets:
257 for id in targets:
258 if id not in self.engines.keys():
258 if id not in self.engines.keys():
259 log.msg("Engine with id %r is not registered" % id)
259 log.msg("Engine with id %r is not registered" % id)
260 raise error.InvalidEngineID("Engine with id %r is not registered" % id)
260 raise error.InvalidEngineID("Engine with id %r is not registered" % id)
261 return map(self.engines.get, targets)
261 return map(self.engines.get, targets)
262 elif targets == 'all':
262 elif targets == 'all':
263 eList = self.engines.values()
263 eList = self.engines.values()
264 if len(eList) == 0:
264 if len(eList) == 0:
265 msg = """There are no engines registered.
265 msg = """There are no engines registered.
266 Check the logs in ~/.ipython/log if you think there should be some."""
266 Check the logs in ~/.ipython/log if you think there should be some."""
267 raise error.NoEnginesRegistered(msg)
267 raise error.NoEnginesRegistered(msg)
268 else:
268 else:
269 return eList
269 return eList
270 else:
270 else:
271 raise error.InvalidEngineID("targets argument is not an int, list of ints or 'all': %r"%targets)
271 raise error.InvalidEngineID("targets argument is not an int, list of ints or 'all': %r"%targets)
272
272
273 def _performOnEngines(self, methodName, *args, **kwargs):
273 def _performOnEngines(self, methodName, *args, **kwargs):
274 """Calls a method on engines and returns deferred to list of results.
274 """Calls a method on engines and returns deferred to list of results.
275
275
276 :Parameters:
276 :Parameters:
277 methodName : str
277 methodName : str
278 Name of the method to be called.
278 Name of the method to be called.
279 targets : int, list of ints, 'all'
279 targets : int, list of ints, 'all'
280 The targets argument to be parsed into a list of engine objects.
280 The targets argument to be parsed into a list of engine objects.
281 args
281 args
282 The positional arguments to be passed to the method.
282 The positional arguments to be passed to the method.
283 kwargs
283 kwargs
284 The keyword arguments passed to the method
284 The keyword arguments passed to the method
285
285
286 :Returns: List of deferreds to the results on each engine
286 :Returns: List of deferreds to the results on each engine
287
287
288 :Exception:
288 :Exception:
289 InvalidEngineID
289 InvalidEngineID
290 If the targets argument is bad in any way.
290 If the targets argument is bad in any way.
291 AttributeError
291 AttributeError
292 If the method doesn't exist on one of the engines.
292 If the method doesn't exist on one of the engines.
293 """
293 """
294 targets = kwargs.pop('targets')
294 targets = kwargs.pop('targets')
295 log.msg("Performing %s on %r" % (methodName, targets))
295 log.msg("Performing %s on %r" % (methodName, targets))
296 # log.msg("Performing %s(%r, %r) on %r" % (methodName, args, kwargs, targets))
296 # log.msg("Performing %s(%r, %r) on %r" % (methodName, args, kwargs, targets))
297 # This will and should raise if targets is not valid!
297 # This will and should raise if targets is not valid!
298 engines = self.engineList(targets)
298 engines = self.engineList(targets)
299 dList = []
299 dList = []
300 for e in engines:
300 for e in engines:
301 meth = getattr(e, methodName, None)
301 meth = getattr(e, methodName, None)
302 if meth is not None:
302 if meth is not None:
303 dList.append(meth(*args, **kwargs))
303 dList.append(meth(*args, **kwargs))
304 else:
304 else:
305 raise AttributeError("Engine %i does not have method %s" % (e.id, methodName))
305 raise AttributeError("Engine %i does not have method %s" % (e.id, methodName))
306 return dList
306 return dList
307
307
308 def _performOnEnginesAndGatherBoth(self, methodName, *args, **kwargs):
308 def _performOnEnginesAndGatherBoth(self, methodName, *args, **kwargs):
309 """Called _performOnEngines and wraps result/exception into deferred."""
309 """Called _performOnEngines and wraps result/exception into deferred."""
310 try:
310 try:
311 dList = self._performOnEngines(methodName, *args, **kwargs)
311 dList = self._performOnEngines(methodName, *args, **kwargs)
312 except (error.InvalidEngineID, AttributeError, KeyError, error.NoEnginesRegistered):
312 except (error.InvalidEngineID, AttributeError, KeyError, error.NoEnginesRegistered):
313 return defer.fail(failure.Failure())
313 return defer.fail(failure.Failure())
314 else:
314 else:
315 # Having fireOnOneErrback is causing problems with the determinacy
315 # Having fireOnOneErrback is causing problems with the determinacy
316 # of the system. Basically, once a single engine has errbacked, this
316 # of the system. Basically, once a single engine has errbacked, this
317 # method returns. In some cases, this will cause the client to submit
317 # method returns. In some cases, this will cause the client to submit
318 # another command. Because the previous command is still running
318 # another command. Because the previous command is still running
319 # on some engines, this command will be queued. When those commands
319 # on some engines, this command will be queued. When those commands
320 # then errback, the second command will raise QueueCleared. Ahhh!
320 # then errback, the second command will raise QueueCleared. Ahhh!
321 d = gatherBoth(dList,
321 d = gatherBoth(dList,
322 fireOnOneErrback=0,
322 fireOnOneErrback=0,
323 consumeErrors=1,
323 consumeErrors=1,
324 logErrors=0)
324 logErrors=0)
325 d.addCallback(error.collect_exceptions, methodName)
325 d.addCallback(error.collect_exceptions, methodName)
326 return d
326 return d
327
327
328 #---------------------------------------------------------------------------
328 #---------------------------------------------------------------------------
329 # General IMultiEngine methods
329 # General IMultiEngine methods
330 #---------------------------------------------------------------------------
330 #---------------------------------------------------------------------------
331
331
332 def get_ids(self):
332 def get_ids(self):
333 return defer.succeed(self.engines.keys())
333 return defer.succeed(self.engines.keys())
334
334
335 #---------------------------------------------------------------------------
335 #---------------------------------------------------------------------------
336 # IEngineMultiplexer methods
336 # IEngineMultiplexer methods
337 #---------------------------------------------------------------------------
337 #---------------------------------------------------------------------------
338
338
339 def execute(self, lines, targets='all'):
339 def execute(self, lines, targets='all'):
340 return self._performOnEnginesAndGatherBoth('execute', lines, targets=targets)
340 return self._performOnEnginesAndGatherBoth('execute', lines, targets=targets)
341
341
342 def push(self, ns, targets='all'):
342 def push(self, ns, targets='all'):
343 return self._performOnEnginesAndGatherBoth('push', ns, targets=targets)
343 return self._performOnEnginesAndGatherBoth('push', ns, targets=targets)
344
344
345 def pull(self, keys, targets='all'):
345 def pull(self, keys, targets='all'):
346 return self._performOnEnginesAndGatherBoth('pull', keys, targets=targets)
346 return self._performOnEnginesAndGatherBoth('pull', keys, targets=targets)
347
347
348 def push_function(self, ns, targets='all'):
348 def push_function(self, ns, targets='all'):
349 return self._performOnEnginesAndGatherBoth('push_function', ns, targets=targets)
349 return self._performOnEnginesAndGatherBoth('push_function', ns, targets=targets)
350
350
351 def pull_function(self, keys, targets='all'):
351 def pull_function(self, keys, targets='all'):
352 return self._performOnEnginesAndGatherBoth('pull_function', keys, targets=targets)
352 return self._performOnEnginesAndGatherBoth('pull_function', keys, targets=targets)
353
353
354 def get_result(self, i=None, targets='all'):
354 def get_result(self, i=None, targets='all'):
355 return self._performOnEnginesAndGatherBoth('get_result', i, targets=targets)
355 return self._performOnEnginesAndGatherBoth('get_result', i, targets=targets)
356
356
357 def reset(self, targets='all'):
357 def reset(self, targets='all'):
358 return self._performOnEnginesAndGatherBoth('reset', targets=targets)
358 return self._performOnEnginesAndGatherBoth('reset', targets=targets)
359
359
360 def keys(self, targets='all'):
360 def keys(self, targets='all'):
361 return self._performOnEnginesAndGatherBoth('keys', targets=targets)
361 return self._performOnEnginesAndGatherBoth('keys', targets=targets)
362
362
363 def kill(self, controller=False, targets='all'):
363 def kill(self, controller=False, targets='all'):
364 if controller:
364 if controller:
365 targets = 'all'
365 targets = 'all'
366 d = self._performOnEnginesAndGatherBoth('kill', targets=targets)
366 d = self._performOnEnginesAndGatherBoth('kill', targets=targets)
367 if controller:
367 if controller:
368 log.msg("Killing controller")
368 log.msg("Killing controller")
369 d.addCallback(lambda _: reactor.callLater(2.0, reactor.stop))
369 d.addCallback(lambda _: reactor.callLater(2.0, reactor.stop))
370 # Consume any weird stuff coming back
370 # Consume any weird stuff coming back
371 d.addBoth(lambda _: None)
371 d.addBoth(lambda _: None)
372 return d
372 return d
373
373
374 def push_serialized(self, namespace, targets='all'):
374 def push_serialized(self, namespace, targets='all'):
375 for k, v in namespace.iteritems():
375 for k, v in namespace.iteritems():
376 log.msg("Pushed object %s is %f MB" % (k, v.getDataSize()))
376 log.msg("Pushed object %s is %f MB" % (k, v.getDataSize()))
377 d = self._performOnEnginesAndGatherBoth('push_serialized', namespace, targets=targets)
377 d = self._performOnEnginesAndGatherBoth('push_serialized', namespace, targets=targets)
378 return d
378 return d
379
379
380 def pull_serialized(self, keys, targets='all'):
380 def pull_serialized(self, keys, targets='all'):
381 try:
381 try:
382 dList = self._performOnEngines('pull_serialized', keys, targets=targets)
382 dList = self._performOnEngines('pull_serialized', keys, targets=targets)
383 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
383 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
384 return defer.fail(failure.Failure())
384 return defer.fail(failure.Failure())
385 else:
385 else:
386 for d in dList:
386 for d in dList:
387 d.addCallback(self._logSizes)
387 d.addCallback(self._logSizes)
388 d = gatherBoth(dList,
388 d = gatherBoth(dList,
389 fireOnOneErrback=0,
389 fireOnOneErrback=0,
390 consumeErrors=1,
390 consumeErrors=1,
391 logErrors=0)
391 logErrors=0)
392 d.addCallback(error.collect_exceptions, 'pull_serialized')
392 d.addCallback(error.collect_exceptions, 'pull_serialized')
393 return d
393 return d
394
394
395 def _logSizes(self, listOfSerialized):
395 def _logSizes(self, listOfSerialized):
396 if isinstance(listOfSerialized, (list, tuple)):
396 if isinstance(listOfSerialized, (list, tuple)):
397 for s in listOfSerialized:
397 for s in listOfSerialized:
398 log.msg("Pulled object is %f MB" % s.getDataSize())
398 log.msg("Pulled object is %f MB" % s.getDataSize())
399 else:
399 else:
400 log.msg("Pulled object is %f MB" % listOfSerialized.getDataSize())
400 log.msg("Pulled object is %f MB" % listOfSerialized.getDataSize())
401 return listOfSerialized
401 return listOfSerialized
402
402
403 def clear_queue(self, targets='all'):
403 def clear_queue(self, targets='all'):
404 return self._performOnEnginesAndGatherBoth('clear_queue', targets=targets)
404 return self._performOnEnginesAndGatherBoth('clear_queue', targets=targets)
405
405
406 def queue_status(self, targets='all'):
406 def queue_status(self, targets='all'):
407 log.msg("Getting queue status on %r" % targets)
407 log.msg("Getting queue status on %r" % targets)
408 try:
408 try:
409 engines = self.engineList(targets)
409 engines = self.engineList(targets)
410 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
410 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
411 return defer.fail(failure.Failure())
411 return defer.fail(failure.Failure())
412 else:
412 else:
413 dList = []
413 dList = []
414 for e in engines:
414 for e in engines:
415 dList.append(e.queue_status().addCallback(lambda s:(e.id, s)))
415 dList.append(e.queue_status().addCallback(lambda s:(e.id, s)))
416 d = gatherBoth(dList,
416 d = gatherBoth(dList,
417 fireOnOneErrback=0,
417 fireOnOneErrback=0,
418 consumeErrors=1,
418 consumeErrors=1,
419 logErrors=0)
419 logErrors=0)
420 d.addCallback(error.collect_exceptions, 'queue_status')
420 d.addCallback(error.collect_exceptions, 'queue_status')
421 return d
421 return d
422
422
423 def get_properties(self, keys=None, targets='all'):
423 def get_properties(self, keys=None, targets='all'):
424 log.msg("Getting properties on %r" % targets)
424 log.msg("Getting properties on %r" % targets)
425 try:
425 try:
426 engines = self.engineList(targets)
426 engines = self.engineList(targets)
427 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
427 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
428 return defer.fail(failure.Failure())
428 return defer.fail(failure.Failure())
429 else:
429 else:
430 dList = [e.get_properties(keys) for e in engines]
430 dList = [e.get_properties(keys) for e in engines]
431 d = gatherBoth(dList,
431 d = gatherBoth(dList,
432 fireOnOneErrback=0,
432 fireOnOneErrback=0,
433 consumeErrors=1,
433 consumeErrors=1,
434 logErrors=0)
434 logErrors=0)
435 d.addCallback(error.collect_exceptions, 'get_properties')
435 d.addCallback(error.collect_exceptions, 'get_properties')
436 return d
436 return d
437
437
438 def set_properties(self, properties, targets='all'):
438 def set_properties(self, properties, targets='all'):
439 log.msg("Setting properties on %r" % targets)
439 log.msg("Setting properties on %r" % targets)
440 try:
440 try:
441 engines = self.engineList(targets)
441 engines = self.engineList(targets)
442 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
442 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
443 return defer.fail(failure.Failure())
443 return defer.fail(failure.Failure())
444 else:
444 else:
445 dList = [e.set_properties(properties) for e in engines]
445 dList = [e.set_properties(properties) for e in engines]
446 d = gatherBoth(dList,
446 d = gatherBoth(dList,
447 fireOnOneErrback=0,
447 fireOnOneErrback=0,
448 consumeErrors=1,
448 consumeErrors=1,
449 logErrors=0)
449 logErrors=0)
450 d.addCallback(error.collect_exceptions, 'set_properties')
450 d.addCallback(error.collect_exceptions, 'set_properties')
451 return d
451 return d
452
452
453 def has_properties(self, keys, targets='all'):
453 def has_properties(self, keys, targets='all'):
454 log.msg("Checking properties on %r" % targets)
454 log.msg("Checking properties on %r" % targets)
455 try:
455 try:
456 engines = self.engineList(targets)
456 engines = self.engineList(targets)
457 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
457 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
458 return defer.fail(failure.Failure())
458 return defer.fail(failure.Failure())
459 else:
459 else:
460 dList = [e.has_properties(keys) for e in engines]
460 dList = [e.has_properties(keys) for e in engines]
461 d = gatherBoth(dList,
461 d = gatherBoth(dList,
462 fireOnOneErrback=0,
462 fireOnOneErrback=0,
463 consumeErrors=1,
463 consumeErrors=1,
464 logErrors=0)
464 logErrors=0)
465 d.addCallback(error.collect_exceptions, 'has_properties')
465 d.addCallback(error.collect_exceptions, 'has_properties')
466 return d
466 return d
467
467
468 def del_properties(self, keys, targets='all'):
468 def del_properties(self, keys, targets='all'):
469 log.msg("Deleting properties on %r" % targets)
469 log.msg("Deleting properties on %r" % targets)
470 try:
470 try:
471 engines = self.engineList(targets)
471 engines = self.engineList(targets)
472 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
472 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
473 return defer.fail(failure.Failure())
473 return defer.fail(failure.Failure())
474 else:
474 else:
475 dList = [e.del_properties(keys) for e in engines]
475 dList = [e.del_properties(keys) for e in engines]
476 d = gatherBoth(dList,
476 d = gatherBoth(dList,
477 fireOnOneErrback=0,
477 fireOnOneErrback=0,
478 consumeErrors=1,
478 consumeErrors=1,
479 logErrors=0)
479 logErrors=0)
480 d.addCallback(error.collect_exceptions, 'del_properties')
480 d.addCallback(error.collect_exceptions, 'del_properties')
481 return d
481 return d
482
482
483 def clear_properties(self, targets='all'):
483 def clear_properties(self, targets='all'):
484 log.msg("Clearing properties on %r" % targets)
484 log.msg("Clearing properties on %r" % targets)
485 try:
485 try:
486 engines = self.engineList(targets)
486 engines = self.engineList(targets)
487 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
487 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
488 return defer.fail(failure.Failure())
488 return defer.fail(failure.Failure())
489 else:
489 else:
490 dList = [e.clear_properties() for e in engines]
490 dList = [e.clear_properties() for e in engines]
491 d = gatherBoth(dList,
491 d = gatherBoth(dList,
492 fireOnOneErrback=0,
492 fireOnOneErrback=0,
493 consumeErrors=1,
493 consumeErrors=1,
494 logErrors=0)
494 logErrors=0)
495 d.addCallback(error.collect_exceptions, 'clear_properties')
495 d.addCallback(error.collect_exceptions, 'clear_properties')
496 return d
496 return d
497
497
498
498
499 components.registerAdapter(MultiEngine,
499 components.registerAdapter(MultiEngine,
500 IControllerBase,
500 IControllerBase,
501 IMultiEngine)
501 IMultiEngine)
502
502
503
503
504 #-------------------------------------------------------------------------------
504 #-------------------------------------------------------------------------------
505 # Interfaces for the Synchronous MultiEngine
505 # Interfaces for the Synchronous MultiEngine
506 #-------------------------------------------------------------------------------
506 #-------------------------------------------------------------------------------
507
507
508 class ISynchronousEngineMultiplexer(Interface):
508 class ISynchronousEngineMultiplexer(Interface):
509 pass
509 pass
510
510
511
511
512 class ISynchronousMultiEngine(ISynchronousEngineMultiplexer):
512 class ISynchronousMultiEngine(ISynchronousEngineMultiplexer):
513 """Synchronous, two-phase version of IMultiEngine.
513 """Synchronous, two-phase version of IMultiEngine.
514
514
515 Methods in this interface are identical to those of IMultiEngine, but they
515 Methods in this interface are identical to those of IMultiEngine, but they
516 take one additional argument:
516 take one additional argument:
517
517
518 execute(lines, targets='all') -> execute(lines, targets='all', block=True)
518 execute(lines, targets='all') -> execute(lines, targets='all', block=True)
519
519
520 :Parameters:
520 :Parameters:
521 block : boolean
521 block : boolean
522 Whether the method should return a deferred to a deferredID or the
522 Whether the method should return a deferred to a deferredID or the
523 actual result. If block=False, a deferred to a deferredID is
523 actual result. If block=False, a deferred to a deferredID is
524 returned and the user must call `get_pending_deferred` at a later
524 returned and the user must call `get_pending_deferred` at a later
525 point. If block=True, a deferred to the actual result comes back.
525 point. If block=True, a deferred to the actual result comes back.
526 """
526 """
527 def get_pending_deferred(deferredID, block=True):
527 def get_pending_deferred(deferredID, block=True):
528 """"""
528 """"""
529
529
530 def clear_pending_deferreds():
530 def clear_pending_deferreds():
531 """"""
531 """"""
532
532
533
533
534 #-------------------------------------------------------------------------------
534 #-------------------------------------------------------------------------------
535 # Implementation of the Synchronous MultiEngine
535 # Implementation of the Synchronous MultiEngine
536 #-------------------------------------------------------------------------------
536 #-------------------------------------------------------------------------------
537
537
538 class SynchronousMultiEngine(PendingDeferredManager):
538 class SynchronousMultiEngine(PendingDeferredManager):
539 """Adapt an `IMultiEngine` -> `ISynchronousMultiEngine`
539 """Adapt an `IMultiEngine` -> `ISynchronousMultiEngine`
540
540
541 Warning: this class uses a decorator that currently uses **kwargs.
541 Warning: this class uses a decorator that currently uses **kwargs.
542 Because of this, block must be passed as a keyword argument, not positionally.
542 Because of this, block must be passed as a keyword argument, not positionally.
543 """
543 """
544
544
545 implements(ISynchronousMultiEngine)
545 implements(ISynchronousMultiEngine)
546
546
547 def __init__(self, multiengine):
547 def __init__(self, multiengine):
548 self.multiengine = multiengine
548 self.multiengine = multiengine
549 PendingDeferredManager.__init__(self)
549 PendingDeferredManager.__init__(self)
550
550
551 #---------------------------------------------------------------------------
551 #---------------------------------------------------------------------------
552 # Decorated pending deferred methods
552 # Decorated pending deferred methods
553 #---------------------------------------------------------------------------
553 #---------------------------------------------------------------------------
554
554
555 @two_phase
555 @two_phase
556 def execute(self, lines, targets='all'):
556 def execute(self, lines, targets='all'):
557 d = self.multiengine.execute(lines, targets)
557 d = self.multiengine.execute(lines, targets)
558 return d
558 return d
559
559
560 @two_phase
560 @two_phase
561 def push(self, namespace, targets='all'):
561 def push(self, namespace, targets='all'):
562 return self.multiengine.push(namespace, targets)
562 return self.multiengine.push(namespace, targets)
563
563
564 @two_phase
564 @two_phase
565 def pull(self, keys, targets='all'):
565 def pull(self, keys, targets='all'):
566 d = self.multiengine.pull(keys, targets)
566 d = self.multiengine.pull(keys, targets)
567 return d
567 return d
568
568
569 @two_phase
569 @two_phase
570 def push_function(self, namespace, targets='all'):
570 def push_function(self, namespace, targets='all'):
571 return self.multiengine.push_function(namespace, targets)
571 return self.multiengine.push_function(namespace, targets)
572
572
573 @two_phase
573 @two_phase
574 def pull_function(self, keys, targets='all'):
574 def pull_function(self, keys, targets='all'):
575 d = self.multiengine.pull_function(keys, targets)
575 d = self.multiengine.pull_function(keys, targets)
576 return d
576 return d
577
577
578 @two_phase
578 @two_phase
579 def get_result(self, i=None, targets='all'):
579 def get_result(self, i=None, targets='all'):
580 return self.multiengine.get_result(i, targets=targets)
580 return self.multiengine.get_result(i, targets=targets)
581
581
582 @two_phase
582 @two_phase
583 def reset(self, targets='all'):
583 def reset(self, targets='all'):
584 return self.multiengine.reset(targets)
584 return self.multiengine.reset(targets)
585
585
586 @two_phase
586 @two_phase
587 def keys(self, targets='all'):
587 def keys(self, targets='all'):
588 return self.multiengine.keys(targets)
588 return self.multiengine.keys(targets)
589
589
590 @two_phase
590 @two_phase
591 def kill(self, controller=False, targets='all'):
591 def kill(self, controller=False, targets='all'):
592 return self.multiengine.kill(controller, targets)
592 return self.multiengine.kill(controller, targets)
593
593
594 @two_phase
594 @two_phase
595 def push_serialized(self, namespace, targets='all'):
595 def push_serialized(self, namespace, targets='all'):
596 return self.multiengine.push_serialized(namespace, targets)
596 return self.multiengine.push_serialized(namespace, targets)
597
597
598 @two_phase
598 @two_phase
599 def pull_serialized(self, keys, targets='all'):
599 def pull_serialized(self, keys, targets='all'):
600 return self.multiengine.pull_serialized(keys, targets)
600 return self.multiengine.pull_serialized(keys, targets)
601
601
602 @two_phase
602 @two_phase
603 def clear_queue(self, targets='all'):
603 def clear_queue(self, targets='all'):
604 return self.multiengine.clear_queue(targets)
604 return self.multiengine.clear_queue(targets)
605
605
606 @two_phase
606 @two_phase
607 def queue_status(self, targets='all'):
607 def queue_status(self, targets='all'):
608 return self.multiengine.queue_status(targets)
608 return self.multiengine.queue_status(targets)
609
609
610 @two_phase
610 @two_phase
611 def set_properties(self, properties, targets='all'):
611 def set_properties(self, properties, targets='all'):
612 return self.multiengine.set_properties(properties, targets)
612 return self.multiengine.set_properties(properties, targets)
613
613
614 @two_phase
614 @two_phase
615 def get_properties(self, keys=None, targets='all'):
615 def get_properties(self, keys=None, targets='all'):
616 return self.multiengine.get_properties(keys, targets)
616 return self.multiengine.get_properties(keys, targets)
617
617
618 @two_phase
618 @two_phase
619 def has_properties(self, keys, targets='all'):
619 def has_properties(self, keys, targets='all'):
620 return self.multiengine.has_properties(keys, targets)
620 return self.multiengine.has_properties(keys, targets)
621
621
622 @two_phase
622 @two_phase
623 def del_properties(self, keys, targets='all'):
623 def del_properties(self, keys, targets='all'):
624 return self.multiengine.del_properties(keys, targets)
624 return self.multiengine.del_properties(keys, targets)
625
625
626 @two_phase
626 @two_phase
627 def clear_properties(self, targets='all'):
627 def clear_properties(self, targets='all'):
628 return self.multiengine.clear_properties(targets)
628 return self.multiengine.clear_properties(targets)
629
629
630 #---------------------------------------------------------------------------
630 #---------------------------------------------------------------------------
631 # IMultiEngine methods
631 # IMultiEngine methods
632 #---------------------------------------------------------------------------
632 #---------------------------------------------------------------------------
633
633
634 def get_ids(self):
634 def get_ids(self):
635 """Return a list of registered engine ids.
635 """Return a list of registered engine ids.
636
636
637 Never use the two-phase block/non-block mechanism for this.
637 Never use the two-phase block/non-block mechanism for this.
638 """
638 """
639 return self.multiengine.get_ids()
639 return self.multiengine.get_ids()
640
640
641
641
642 components.registerAdapter(SynchronousMultiEngine, IMultiEngine, ISynchronousMultiEngine)
642 components.registerAdapter(SynchronousMultiEngine, IMultiEngine, ISynchronousMultiEngine)
643
643
644
644
645 #-------------------------------------------------------------------------------
645 #-------------------------------------------------------------------------------
646 # Various high-level interfaces that can be used as MultiEngine mix-ins
646 # Various high-level interfaces that can be used as MultiEngine mix-ins
647 #-------------------------------------------------------------------------------
647 #-------------------------------------------------------------------------------
648
648
649 #-------------------------------------------------------------------------------
649 #-------------------------------------------------------------------------------
650 # IMultiEngineCoordinator
650 # IMultiEngineCoordinator
651 #-------------------------------------------------------------------------------
651 #-------------------------------------------------------------------------------
652
652
653 class IMultiEngineCoordinator(Interface):
653 class IMultiEngineCoordinator(Interface):
654 """Methods that work on multiple engines explicitly."""
654 """Methods that work on multiple engines explicitly."""
655
655
656 def scatter(key, seq, style='basic', flatten=False, targets='all'):
656 def scatter(key, seq, dist='b', flatten=False, targets='all'):
657 """Partition and distribute a sequence to targets.
657 """Partition and distribute a sequence to targets."""
658
658
659 :Parameters:
659 def gather(key, dist='b', targets='all'):
660 key : str
660 """Gather object key from targets."""
661 The variable name to call the scattered sequence.
662 seq : list, tuple, array
663 The sequence to scatter. The type should be preserved.
664 style : string
665 A specification of how the sequence is partitioned. Currently
666 only 'basic' is implemented.
667 flatten : boolean
668 Should single element sequences be converted to scalars.
669 """
670
671 def gather(key, style='basic', targets='all'):
672 """Gather object key from targets.
673
661
674 :Parameters:
662 def raw_map(func, seqs, dist='b', targets='all'):
675 key : string
676 The name of a sequence on the targets to gather.
677 style : string
678 A specification of how the sequence is partitioned. Currently
679 only 'basic' is implemented.
680 """
663 """
681
664 A parallelized version of Python's builtin `map` function.
682 def map(func, seq, style='basic', targets='all'):
683 """A parallelized version of Python's builtin map.
684
665
685 This function implements the following pattern:
666 This has a slightly different syntax than the builtin `map`.
667 This is needed because we need to have keyword arguments and thus
668 can't use *args to capture all the sequences. Instead, they must
669 be passed in a list or tuple.
686
670
687 1. The sequence seq is scattered to the given targets.
671 The equivalence is:
688 2. map(functionSource, seq) is called on each engine.
689 3. The resulting sequences are gathered back to the local machine.
690
691 :Parameters:
692 targets : int, list or 'all'
693 The engine ids the action will apply to. Call `get_ids` to see
694 a list of currently available engines.
695 func : str, function
696 An actual function object or a Python string that names a
697 callable defined on the engines.
698 seq : list, tuple or numpy array
699 The local sequence to be scattered.
700 style : str
701 Only 'basic' is supported for now.
702
703 :Returns: A list of len(seq) with functionSource called on each element
704 of seq.
705
706 Example
707 =======
708
672
709 >>> rc.mapAll('lambda x: x*x', range(10000))
673 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
710 [0,1,4,9,16,25,36,...]
674
675 Most users will want to use parallel functions or the `mapper`
676 and `map` methods for an API that follows that of the builtin
677 `map`.
711 """
678 """
712
679
713
680
714 class ISynchronousMultiEngineCoordinator(IMultiEngineCoordinator):
681 class ISynchronousMultiEngineCoordinator(IMultiEngineCoordinator):
715 """Methods that work on multiple engines explicitly."""
682 """Methods that work on multiple engines explicitly."""
716 pass
683
684 def scatter(key, seq, dist='b', flatten=False, targets='all', block=True):
685 """Partition and distribute a sequence to targets."""
686
687 def gather(key, dist='b', targets='all', block=True):
688 """Gather object key from targets"""
689
690 def raw_map(func, seqs, dist='b', targets='all', block=True):
691 """
692 A parallelized version of Python's builtin map.
693
694 This has a slightly different syntax than the builtin `map`.
695 This is needed because we need to have keyword arguments and thus
696 can't use *args to capture all the sequences. Instead, they must
697 be passed in a list or tuple.
698
699 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
700
701 Most users will want to use parallel functions or the `mapper`
702 and `map` methods for an API that follows that of the builtin
703 `map`.
704 """
717
705
718
706
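A hedged sketch of the scatter/execute/gather pattern and of raw_map's calling convention, written as it would look from a blocking client (on the Twisted side these calls return deferreds instead). `mec` is an illustrative name, not part of the interfaces above.

    # scatter / execute / gather: the basic data-parallel loop.
    mec.scatter('x', range(16), dist='b', targets='all')
    mec.execute('y = [xi**2 for xi in x]', targets='all')
    squares = mec.gather('y', dist='b', targets='all')

    # raw_map bundles the sequences into one list instead of spreading them
    # like the builtin:  map(f, seq0, seq1)  <->  raw_map(f, [seq0, seq1])
    sums = mec.raw_map(lambda a, b: a + b,
                       [[1, 2, 3], [10, 20, 30]],
                       dist='b', targets='all')
    # sums -> [11, 22, 33]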
719 #-------------------------------------------------------------------------------
707 #-------------------------------------------------------------------------------
720 # IMultiEngineExtras
708 # IMultiEngineExtras
721 #-------------------------------------------------------------------------------
709 #-------------------------------------------------------------------------------
722
710
723 class IMultiEngineExtras(Interface):
711 class IMultiEngineExtras(Interface):
724
712
725 def zip_pull(targets, *keys):
713 def zip_pull(targets, keys):
726 """Pull, but return results in a different format from `pull`.
714 """
715 Pull, but return results in a different format from `pull`.
727
716
728 This method basically returns zip(pull(targets, *keys)), with a few
717 This method basically returns zip(pull(targets, *keys)), with a few
729 edge cases handled differently. Users of chainsaw will find this format
718 edge cases handled differently. Users of chainsaw will find this format
730 familiar.
719 familiar.
731
732 :Parameters:
733 targets : int, list or 'all'
734 The engine ids the action will apply to. Call `get_ids` to see
735 a list of currently available engines.
736 keys: list or tuple of str
737 A list of variable names as string of the Python objects to be pulled
738 back to the client.
739
740 :Returns: A list of pulled Python objects for each target.
741 """
720 """
742
721
743 def run(targets, fname):
722 def run(targets, fname):
744 """Run a .py file on targets.
723 """Run a .py file on targets."""
745
746 :Parameters:
747 targets : int, list or 'all'
748 The engine ids the action will apply to. Call `get_ids` to see
749 a list of currently available engines.
750 fname : str
751 The filename of a .py file on the local system to be sent to and run
752 on the engines.
753 block : boolean
754 Should I block or not. If block=True, wait for the action to
755 complete and return the result. If block=False, return a
756 `PendingResult` object that can be used to later get the
757 result. If block is not specified, the block attribute
758 will be used instead.
759 """
760
724
761
725
762 class ISynchronousMultiEngineExtras(IMultiEngineExtras):
726 class ISynchronousMultiEngineExtras(IMultiEngineExtras):
763 pass
727 def zip_pull(targets, keys, block=True):
764
728 """
729 Pull, but return results in a different format from `pull`.
730
731 This method basically returns zip(pull(targets, *keys)), with a few
732 edge cases handled differently. Users of chainsaw will find this format
733 familiar.
734 """
735
736 def run(targets, fname, block=True):
737 """Run a .py file on targets."""
765
738
766 #-------------------------------------------------------------------------------
739 #-------------------------------------------------------------------------------
767 # The full MultiEngine interface
740 # The full MultiEngine interface
768 #-------------------------------------------------------------------------------
741 #-------------------------------------------------------------------------------
769
742
770 class IFullMultiEngine(IMultiEngine,
743 class IFullMultiEngine(IMultiEngine,
771 IMultiEngineCoordinator,
744 IMultiEngineCoordinator,
772 IMultiEngineExtras):
745 IMultiEngineExtras):
773 pass
746 pass
774
747
775
748
776 class IFullSynchronousMultiEngine(ISynchronousMultiEngine,
749 class IFullSynchronousMultiEngine(ISynchronousMultiEngine,
777 ISynchronousMultiEngineCoordinator,
750 ISynchronousMultiEngineCoordinator,
778 ISynchronousMultiEngineExtras):
751 ISynchronousMultiEngineExtras):
779 pass
752 pass
780
753
@@ -1,833 +1,896 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_multiengineclient -*-
2 # -*- test-case-name: IPython.kernel.test.test_multiengineclient -*-
3
3
4 """General Classes for IMultiEngine clients."""
4 """General Classes for IMultiEngine clients."""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18
18
19 import sys
19 import sys
20 import cPickle as pickle
20 import cPickle as pickle
21 from types import FunctionType
21 from types import FunctionType
22 import linecache
22 import linecache
23
23
24 from twisted.internet import reactor
24 from twisted.internet import reactor
25 from twisted.python import components, log
25 from twisted.python import components, log
26 from twisted.python.failure import Failure
26 from twisted.python.failure import Failure
27 from zope.interface import Interface, implements, Attribute
27 from zope.interface import Interface, implements, Attribute
28
28
29 from IPython.ColorANSI import TermColors
29 from IPython.ColorANSI import TermColors
30
30
31 from IPython.kernel.twistedutil import blockingCallFromThread
31 from IPython.kernel.twistedutil import blockingCallFromThread
32 from IPython.kernel import error
32 from IPython.kernel import error
33 from IPython.kernel.parallelfunction import ParallelFunction
33 from IPython.kernel.parallelfunction import ParallelFunction
34 from IPython.kernel.mapper import (
35 MultiEngineMapper,
36 IMultiEngineMapperFactory,
37 IMapper
38 )
34 from IPython.kernel import map as Map
39 from IPython.kernel import map as Map
35 from IPython.kernel import multiengine as me
40 from IPython.kernel import multiengine as me
36 from IPython.kernel.multiengine import (IFullMultiEngine,
41 from IPython.kernel.multiengine import (IFullMultiEngine,
37 IFullSynchronousMultiEngine)
42 IFullSynchronousMultiEngine)
38
43
39
44
40 #-------------------------------------------------------------------------------
45 #-------------------------------------------------------------------------------
41 # Pending Result things
46 # Pending Result things
42 #-------------------------------------------------------------------------------
47 #-------------------------------------------------------------------------------
43
48
44 class IPendingResult(Interface):
49 class IPendingResult(Interface):
45 """A representation of a result that is pending.
50 """A representation of a result that is pending.
46
51
47 This class is similar to Twisted's `Deferred` object, but is designed to be
52 This class is similar to Twisted's `Deferred` object, but is designed to be
48 used in a synchronous context.
53 used in a synchronous context.
49 """
54 """
50
55
51 result_id=Attribute("ID of the deferred on the other side")
56 result_id=Attribute("ID of the deferred on the other side")
52 client=Attribute("A client that I came from")
57 client=Attribute("A client that I came from")
53 r=Attribute("An attribute that is a property that calls and returns get_result")
58 r=Attribute("An attribute that is a property that calls and returns get_result")
54
59
55 def get_result(default=None, block=True):
60 def get_result(default=None, block=True):
56 """
61 """
57 Get a result that is pending.
62 Get a result that is pending.
58
63
59 :Parameters:
64 :Parameters:
60 default
65 default
61 The value to return if the result is not ready.
66 The value to return if the result is not ready.
62 block : boolean
67 block : boolean
63 Should I block for the result.
68 Should I block for the result.
64
69
65 :Returns: The actual result or the default value.
70 :Returns: The actual result or the default value.
66 """
71 """
67
72
68 def add_callback(f, *args, **kwargs):
73 def add_callback(f, *args, **kwargs):
69 """
74 """
70 Add a callback that is called with the result.
75 Add a callback that is called with the result.
71
76
72 If the original result is foo, adding a callback will cause
77 If the original result is foo, adding a callback will cause
73 f(foo, *args, **kwargs) to be returned instead. If multiple
78 f(foo, *args, **kwargs) to be returned instead. If multiple
74 callbacks are registered, they are chained together: the result of
79 callbacks are registered, they are chained together: the result of
75 one is passed to the next and so on.
80 one is passed to the next and so on.
76
81
77 Unlike Twisted's Deferred object, there is no errback chain. Thus
82 Unlike Twisted's Deferred object, there is no errback chain. Thus
78 any exception raised will not be caught and handled. User must
83 any exception raised will not be caught and handled. User must
79 catch these by hand when calling `get_result`.
84 catch these by hand when calling `get_result`.
80 """
85 """
81
86
82
87
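A brief sketch of the callback chaining described above, assuming `pr` is a PendingResult handed back by some non-blocking client call (the name and the call that produced it are illustrative):

    # Each callback receives the output of the previous one.
    pr.add_callback(lambda results: [r * 2 for r in results])
    pr.add_callback(sum)

    try:
        total = pr.get_result(block=True)  # callbacks run on the retrieved result
    except Exception:
        # There is no errback chain, so remote failures surface here instead.
        raise
    print total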
83 class PendingResult(object):
88 class PendingResult(object):
84 """A representation of a result that is not yet ready.
89 """A representation of a result that is not yet ready.
85
90
86 A user should not create a `PendingResult` instance by hand.
91 A user should not create a `PendingResult` instance by hand.
87
92
88 Methods
93 Methods
89 =======
94 =======
90
95
91 * `get_result`
96 * `get_result`
92 * `add_callback`
97 * `add_callback`
93
98
94 Properties
99 Properties
95 ==========
100 ==========
96 * `r`
101 * `r`
97 """
102 """
98
103
99 def __init__(self, client, result_id):
104 def __init__(self, client, result_id):
100 """Create a PendingResult with a result_id and a client instance.
105 """Create a PendingResult with a result_id and a client instance.
101
106
102 The client should implement `_getPendingResult(result_id, block)`.
107 The client should implement `_getPendingResult(result_id, block)`.
103 """
108 """
104 self.client = client
109 self.client = client
105 self.result_id = result_id
110 self.result_id = result_id
106 self.called = False
111 self.called = False
107 self.raised = False
112 self.raised = False
108 self.callbacks = []
113 self.callbacks = []
109
114
110 def get_result(self, default=None, block=True):
115 def get_result(self, default=None, block=True):
111 """Get a result that is pending.
116 """Get a result that is pending.
112
117
113 This method will connect to an IMultiEngine adapted controller
118 This method will connect to an IMultiEngine adapted controller
114 and see if the result is ready. If the action triggered an exception,
119 and see if the result is ready. If the action triggered an exception,
115 it is recorded and raised. The result/exception is cached once it is
120 it is recorded and raised. The result/exception is cached once it is
116 retrieved. Calling `get_result` again will get this cached result or will
121 retrieved. Calling `get_result` again will get this cached result or will
117 re-raise the exception. The .r attribute is a property that calls
122 re-raise the exception. The .r attribute is a property that calls
118 `get_result` with block=True.
123 `get_result` with block=True.
119
124
120 :Parameters:
125 :Parameters:
121 default
126 default
122 The value to return if the result is not ready.
127 The value to return if the result is not ready.
123 block : boolean
128 block : boolean
124 Should I block for the result.
129 Should I block for the result.
125
130
126 :Returns: The actual result or the default value.
131 :Returns: The actual result or the default value.
127 """
132 """
128
133
129 if self.called:
134 if self.called:
130 if self.raised:
135 if self.raised:
131 raise self.result[0], self.result[1], self.result[2]
136 raise self.result[0], self.result[1], self.result[2]
132 else:
137 else:
133 return self.result
138 return self.result
134 try:
139 try:
135 result = self.client.get_pending_deferred(self.result_id, block)
140 result = self.client.get_pending_deferred(self.result_id, block)
136 except error.ResultNotCompleted:
141 except error.ResultNotCompleted:
137 return default
142 return default
138 except:
143 except:
139 # Reraise other errors, but first record them so they can be reraised
144 # Reraise other errors, but first record them so they can be reraised
140 # later if .r or get_result is called again.
145 # later if .r or get_result is called again.
141 self.result = sys.exc_info()
146 self.result = sys.exc_info()
142 self.called = True
147 self.called = True
143 self.raised = True
148 self.raised = True
144 raise
149 raise
145 else:
150 else:
146 for cb in self.callbacks:
151 for cb in self.callbacks:
147 result = cb[0](result, *cb[1], **cb[2])
152 result = cb[0](result, *cb[1], **cb[2])
148 self.result = result
153 self.result = result
149 self.called = True
154 self.called = True
150 return result
155 return result
151
156
152 def add_callback(self, f, *args, **kwargs):
157 def add_callback(self, f, *args, **kwargs):
153 """Add a callback that is called with the result.
158 """Add a callback that is called with the result.
154
159
155 If the original result is result, adding a callback will cause
160 If the original result is result, adding a callback will cause
156 f(result, *args, **kwargs) to be returned instead. If multiple
161 f(result, *args, **kwargs) to be returned instead. If multiple
157 callbacks are registered, they are chained together: the result of
162 callbacks are registered, they are chained together: the result of
158 one is passed to the next and so on.
163 one is passed to the next and so on.
159
164
160 Unlike Twisted's Deferred object, there is no errback chain. Thus
165 Unlike Twisted's Deferred object, there is no errback chain. Thus
161 any exception raised will not be caught and handled. Users must
166 any exception raised will not be caught and handled. Users must
162 catch these by hand when calling `get_result`.
167 catch these by hand when calling `get_result`.
163 """
168 """
164 assert callable(f)
169 assert callable(f)
165 self.callbacks.append((f, args, kwargs))
170 self.callbacks.append((f, args, kwargs))
166
171
167 def __cmp__(self, other):
172 def __cmp__(self, other):
168 if self.result_id < other.result_id:
173 if self.result_id < other.result_id:
169 return -1
174 return -1
170 else:
175 else:
171 return 1
176 return 1
172
177
173 def _get_r(self):
178 def _get_r(self):
174 return self.get_result(block=True)
179 return self.get_result(block=True)
175
180
176 r = property(_get_r)
181 r = property(_get_r)
177 """This property is a shortcut to a `get_result(block=True)`."""
182 """This property is a shortcut to a `get_result(block=True)`."""
178
183
179
184
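To make the flow above concrete, here is a minimal sketch of how a `PendingResult` is typically consumed; it assumes an already-connected multiengine client `mec` (not defined in this hunk) whose non-blocking calls hand back `PendingResult` instances.

    # Illustrative only: `mec` is an assumed, already-connected client.
    pr = mec.execute('a = 2**10', block=False)              # returns a PendingResult at once
    pr.add_callback(lambda res: res[0])                     # chain a transformation onto the eventual result
    print pr.get_result(default='not ready', block=False)   # poll without blocking
    print pr.r                                              # equivalent to get_result(block=True)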
180 #-------------------------------------------------------------------------------
185 #-------------------------------------------------------------------------------
181 # Pretty printing wrappers for certain lists
186 # Pretty printing wrappers for certain lists
182 #-------------------------------------------------------------------------------
187 #-------------------------------------------------------------------------------
183
188
184 class ResultList(list):
189 class ResultList(list):
185 """A subclass of list that pretty prints the output of `execute`/`get_result`."""
190 """A subclass of list that pretty prints the output of `execute`/`get_result`."""
186
191
187 def __repr__(self):
192 def __repr__(self):
188 output = []
193 output = []
189 blue = TermColors.Blue
194 # These colored prompts were not working on Windows
190 normal = TermColors.Normal
195 if sys.platform == 'win32':
191 red = TermColors.Red
196 blue = normal = red = green = ''
192 green = TermColors.Green
197 else:
198 blue = TermColors.Blue
199 normal = TermColors.Normal
200 red = TermColors.Red
201 green = TermColors.Green
193 output.append("<Results List>\n")
202 output.append("<Results List>\n")
194 for cmd in self:
203 for cmd in self:
195 if isinstance(cmd, Failure):
204 if isinstance(cmd, Failure):
196 output.append(cmd)
205 output.append(cmd)
197 else:
206 else:
198 target = cmd.get('id',None)
207 target = cmd.get('id',None)
199 cmd_num = cmd.get('number',None)
208 cmd_num = cmd.get('number',None)
200 cmd_stdin = cmd.get('input',{}).get('translated','No Input')
209 cmd_stdin = cmd.get('input',{}).get('translated','No Input')
201 cmd_stdout = cmd.get('stdout', None)
210 cmd_stdout = cmd.get('stdout', None)
202 cmd_stderr = cmd.get('stderr', None)
211 cmd_stderr = cmd.get('stderr', None)
203 output.append("%s[%i]%s In [%i]:%s %s\n" % \
212 output.append("%s[%i]%s In [%i]:%s %s\n" % \
204 (green, target,
213 (green, target,
205 blue, cmd_num, normal, cmd_stdin))
214 blue, cmd_num, normal, cmd_stdin))
206 if cmd_stdout:
215 if cmd_stdout:
207 output.append("%s[%i]%s Out[%i]:%s %s\n" % \
216 output.append("%s[%i]%s Out[%i]:%s %s\n" % \
208 (green, target,
217 (green, target,
209 red, cmd_num, normal, cmd_stdout))
218 red, cmd_num, normal, cmd_stdout))
210 if cmd_stderr:
219 if cmd_stderr:
211 output.append("%s[%i]%s Err[%i]:\n%s %s" % \
220 output.append("%s[%i]%s Err[%i]:\n%s %s" % \
212 (green, target,
221 (green, target,
213 red, cmd_num, normal, cmd_stderr))
222 red, cmd_num, normal, cmd_stderr))
214 return ''.join(output)
223 return ''.join(output)
215
224
216
225
217 def wrapResultList(result):
226 def wrapResultList(result):
218 """A function that wraps the output of `execute`/`get_result` -> `ResultList`."""
227 """A function that wraps the output of `execute`/`get_result` -> `ResultList`."""
219 if len(result) == 0:
228 if len(result) == 0:
220 result = [result]
229 result = [result]
221 return ResultList(result)
230 return ResultList(result)
222
231
223
232
224 class QueueStatusList(list):
233 class QueueStatusList(list):
225 """A subclass of list that pretty prints the output of `queue_status`."""
234 """A subclass of list that pretty prints the output of `queue_status`."""
226
235
227 def __repr__(self):
236 def __repr__(self):
228 output = []
237 output = []
229 output.append("<Queue Status List>\n")
238 output.append("<Queue Status List>\n")
230 for e in self:
239 for e in self:
231 output.append("Engine: %s\n" % repr(e[0]))
240 output.append("Engine: %s\n" % repr(e[0]))
232 output.append(" Pending: %s\n" % repr(e[1]['pending']))
241 output.append(" Pending: %s\n" % repr(e[1]['pending']))
233 for q in e[1]['queue']:
242 for q in e[1]['queue']:
234 output.append(" Command: %s\n" % repr(q))
243 output.append(" Command: %s\n" % repr(q))
235 return ''.join(output)
244 return ''.join(output)
236
245
237
246
238 #-------------------------------------------------------------------------------
247 #-------------------------------------------------------------------------------
239 # InteractiveMultiEngineClient
248 # InteractiveMultiEngineClient
240 #-------------------------------------------------------------------------------
249 #-------------------------------------------------------------------------------
241
250
242 class InteractiveMultiEngineClient(object):
251 class InteractiveMultiEngineClient(object):
243 """A mixin class that add a few methods to a multiengine client.
252 """A mixin class that add a few methods to a multiengine client.
244
253
245 The methods in this mixin class are designed for interactive usage.
254 The methods in this mixin class are designed for interactive usage.
246 """
255 """
247
256
248 def activate(self):
257 def activate(self):
249 """Make this `MultiEngineClient` active for parallel magic commands.
258 """Make this `MultiEngineClient` active for parallel magic commands.
250
259
251 IPython has a magic command syntax to work with `MultiEngineClient` objects.
260 IPython has a magic command syntax to work with `MultiEngineClient` objects.
252 In a given IPython session there is a single active one. While
261 In a given IPython session there is a single active one. While
253 there can be many `MultiEngineClient` created and used by the user,
262 there can be many `MultiEngineClient` created and used by the user,
254 there is only one active one. The active `MultiEngineClient` is used whenever
263 there is only one active one. The active `MultiEngineClient` is used whenever
255 the magic commands %px and %autopx are used.
264 the magic commands %px and %autopx are used.
256
265
257 The activate() method is called on a given `MultiEngineClient` to make it
266 The activate() method is called on a given `MultiEngineClient` to make it
258 active. Once this has been done, the magic commands can be used.
267 active. Once this has been done, the magic commands can be used.
259 """
268 """
260
269
261 try:
270 try:
262 __IPYTHON__.activeController = self
271 __IPYTHON__.activeController = self
263 except NameError:
272 except NameError:
264 print "The IPython Controller magics only work within IPython."
273 print "The IPython Controller magics only work within IPython."
265
274
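Roughly, the workflow this docstring describes looks like the following inside an IPython session (a sketch only; `mec` is an assumed `MultiEngineClient`, and the `%px`/`%autopx` lines are typed at the IPython prompt rather than run as plain Python):

    mec.activate()            # make this client the target of the parallel magics
    # At the IPython prompt one would then type, for example:
    #   %px import numpy       # run a statement on all engines of the active client
    #   %autopx                 # toggle: subsequent input lines run on the engines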
266 def __setitem__(self, key, value):
275 def __setitem__(self, key, value):
267 """Add a dictionary interface for pushing/pulling.
276 """Add a dictionary interface for pushing/pulling.
268
277
269 This functions as a shorthand for `push`.
278 This functions as a shorthand for `push`.
270
279
271 :Parameters:
280 :Parameters:
272 key : str
281 key : str
273 What to call the remote object.
282 What to call the remote object.
274 value : object
283 value : object
275 The local Python object to push.
284 The local Python object to push.
276 """
285 """
277 targets, block = self._findTargetsAndBlock()
286 targets, block = self._findTargetsAndBlock()
278 return self.push({key:value}, targets=targets, block=block)
287 return self.push({key:value}, targets=targets, block=block)
279
288
280 def __getitem__(self, key):
289 def __getitem__(self, key):
281 """Add a dictionary interface for pushing/pulling.
290 """Add a dictionary interface for pushing/pulling.
282
291
283 This functions as a shorthand for `pull`.
292 This functions as a shorthand for `pull`.
284
293
285 :Parameters:
294 :Parameters:
286 - `key`: A string representing the key.
295 - `key`: A string representing the key.
287 """
296 """
288 if isinstance(key, str):
297 if isinstance(key, str):
289 targets, block = self._findTargetsAndBlock()
298 targets, block = self._findTargetsAndBlock()
290 return self.pull(key, targets=targets, block=block)
299 return self.pull(key, targets=targets, block=block)
291 else:
300 else:
292 raise TypeError("__getitem__ only takes strs")
301 raise TypeError("__getitem__ only takes strs")
293
302
294 def __len__(self):
303 def __len__(self):
295 """Return the number of available engines."""
304 """Return the number of available engines."""
296 return len(self.get_ids())
305 return len(self.get_ids())
297
306
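A short sketch of the dictionary-style shorthand and `len()` support defined above, assuming an existing client `mec`:

    mec['a'] = range(4)       # same as mec.push({'a': range(4)})
    print mec['a']            # same as mec.pull('a'); one value per engine
    print len(mec)            # number of currently registered engines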
298 def parallelize(self, func, targets=None, block=None):
299 """Build a `ParallelFunction` object for functionName on engines.
300
301 The returned object will implement a parallel version of functionName
302 that takes a local sequence as its only argument and calls (in
303 parallel) functionName on each element of that sequence. The
304 `ParallelFunction` object has a `targets` attribute that controls
305 which engines the function is run on.
306
307 :Parameters:
308 targets : int, list or 'all'
309 The engine ids the action will apply to. Call `get_ids` to see
310 a list of currently available engines.
311 functionName : str
312 A Python string that names a callable defined on the engines.
313
314 :Returns: A `ParallelFunction` object.
315 """
316 targets, block = self._findTargetsAndBlock(targets, block)
317 return ParallelFunction(func, self, targets, block)
318
319 #---------------------------------------------------------------------------
307 #---------------------------------------------------------------------------
320 # Make this a context manager for with
308 # Make this a context manager for with
321 #---------------------------------------------------------------------------
309 #---------------------------------------------------------------------------
322
310
323 def findsource_file(self,f):
311 def findsource_file(self,f):
324 linecache.checkcache()
312 linecache.checkcache()
325 s = findsource(f.f_code)
313 s = findsource(f.f_code)
326 lnum = f.f_lineno
314 lnum = f.f_lineno
327 wsource = s[0][f.f_lineno:]
315 wsource = s[0][f.f_lineno:]
328 return strip_whitespace(wsource)
316 return strip_whitespace(wsource)
329
317
330 def findsource_ipython(self,f):
318 def findsource_ipython(self,f):
331 from IPython import ipapi
319 from IPython import ipapi
332 self.ip = ipapi.get()
320 self.ip = ipapi.get()
333 wsource = [l+'\n' for l in
321 wsource = [l+'\n' for l in
334 self.ip.IP.input_hist_raw[-1].splitlines()[1:]]
322 self.ip.IP.input_hist_raw[-1].splitlines()[1:]]
335 return strip_whitespace(wsource)
323 return strip_whitespace(wsource)
336
324
337 def __enter__(self):
325 def __enter__(self):
338 f = sys._getframe(1)
326 f = sys._getframe(1)
339 local_ns = f.f_locals
327 local_ns = f.f_locals
340 global_ns = f.f_globals
328 global_ns = f.f_globals
341 if f.f_code.co_filename == '<ipython console>':
329 if f.f_code.co_filename == '<ipython console>':
342 s = self.findsource_ipython(f)
330 s = self.findsource_ipython(f)
343 else:
331 else:
344 s = self.findsource_file(f)
332 s = self.findsource_file(f)
345
333
346 self._with_context_result = self.execute(s)
334 self._with_context_result = self.execute(s)
347
335
348 def __exit__ (self, etype, value, tb):
336 def __exit__ (self, etype, value, tb):
349 if issubclass(etype,error.StopLocalExecution):
337 if issubclass(etype,error.StopLocalExecution):
350 return True
338 return True
351
339
352
340
353 def remote():
341 def remote():
354 m = 'Special exception to stop local execution of parallel code.'
342 m = 'Special exception to stop local execution of parallel code.'
355 raise error.StopLocalExecution(m)
343 raise error.StopLocalExecution(m)
356
344
357 def strip_whitespace(source):
345 def strip_whitespace(source):
358 # Expand tabs to avoid any confusion.
346 # Expand tabs to avoid any confusion.
359 wsource = [l.expandtabs(4) for l in source]
347 wsource = [l.expandtabs(4) for l in source]
360 # Detect the indentation level
348 # Detect the indentation level
361 done = False
349 done = False
362 for line in wsource:
350 for line in wsource:
363 if line.isspace():
351 if line.isspace():
364 continue
352 continue
365 for col,char in enumerate(line):
353 for col,char in enumerate(line):
366 if char != ' ':
354 if char != ' ':
367 done = True
355 done = True
368 break
356 break
369 if done:
357 if done:
370 break
358 break
371 # Now we know how much leading space there is in the code. Next, we
359 # Now we know how much leading space there is in the code. Next, we
372 # extract up to the first line that has less indentation.
360 # extract up to the first line that has less indentation.
373 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
361 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
374 # detect triple quoted strings that may have flush left text.
362 # detect triple quoted strings that may have flush left text.
375 for lno,line in enumerate(wsource):
363 for lno,line in enumerate(wsource):
376 lead = line[:col]
364 lead = line[:col]
377 if lead.isspace():
365 if lead.isspace():
378 continue
366 continue
379 else:
367 else:
380 if not lead.lstrip().startswith('#'):
368 if not lead.lstrip().startswith('#'):
381 break
369 break
382 # The real 'with' source is up to lno
370 # The real 'with' source is up to lno
383 src_lines = [l[col:] for l in wsource[:lno+1]]
371 src_lines = [l[col:] for l in wsource[:lno+1]]
384
372
385 # Finally, check that the source's first non-comment line begins with the
373 # Finally, check that the source's first non-comment line begins with the
386 # special call 'remote()'
374 # special call 'remote()'
387 for nline,line in enumerate(src_lines):
375 for nline,line in enumerate(src_lines):
388 if line.isspace() or line.startswith('#'):
376 if line.isspace() or line.startswith('#'):
389 continue
377 continue
390 if 'remote()' in line:
378 if 'remote()' in line:
391 break
379 break
392 else:
380 else:
393 raise ValueError('remote() call missing at the start of code')
381 raise ValueError('remote() call missing at the start of code')
394 src = ''.join(src_lines[nline+1:])
382 src = ''.join(src_lines[nline+1:])
395 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
383 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
396 return src
384 return src
397
385
398
386
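Taken together, `__enter__`, `__exit__`, `remote()` and `strip_whitespace` support a with-statement form of remote execution: the block's source is captured, everything after the `remote()` call is shipped to the engines, and `StopLocalExecution` prevents the same code from also running locally. The intended pattern appears to be the following sketch (assuming an active client `mec`):

    with mec:
        remote()              # everything below this call is executed on the engines
        import math
        x = math.sqrt(2)
    print mec['x']            # illustrative: pull the remotely computed value back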
399 #-------------------------------------------------------------------------------
387 #-------------------------------------------------------------------------------
400 # The top-level MultiEngine client adaptor
388 # The top-level MultiEngine client adaptor
401 #-------------------------------------------------------------------------------
389 #-------------------------------------------------------------------------------
402
390
403
391
404 class IFullBlockingMultiEngineClient(Interface):
392 class IFullBlockingMultiEngineClient(Interface):
405 pass
393 pass
406
394
407
395
408 class FullBlockingMultiEngineClient(InteractiveMultiEngineClient):
396 class FullBlockingMultiEngineClient(InteractiveMultiEngineClient):
409 """
397 """
410 A blocking client to the `IMultiEngine` controller interface.
398 A blocking client to the `IMultiEngine` controller interface.
411
399
412 This class allows users to use a set of engines for a parallel
400 This class allows users to use a set of engines for a parallel
413 computation through the `IMultiEngine` interface. In this interface,
401 computation through the `IMultiEngine` interface. In this interface,
414 each engine has a specific id (an int) that is used to refer to the
402 each engine has a specific id (an int) that is used to refer to the
415 engine, run code on it, etc.
403 engine, run code on it, etc.
416 """
404 """
417
405
418 implements(IFullBlockingMultiEngineClient)
406 implements(
407 IFullBlockingMultiEngineClient,
408 IMultiEngineMapperFactory,
409 IMapper
410 )
419
411
420 def __init__(self, smultiengine):
412 def __init__(self, smultiengine):
421 self.smultiengine = smultiengine
413 self.smultiengine = smultiengine
422 self.block = True
414 self.block = True
423 self.targets = 'all'
415 self.targets = 'all'
424
416
425 def _findBlock(self, block=None):
417 def _findBlock(self, block=None):
426 if block is None:
418 if block is None:
427 return self.block
419 return self.block
428 else:
420 else:
429 if block in (True, False):
421 if block in (True, False):
430 return block
422 return block
431 else:
423 else:
432 raise ValueError("block must be True or False")
424 raise ValueError("block must be True or False")
433
425
434 def _findTargets(self, targets=None):
426 def _findTargets(self, targets=None):
435 if targets is None:
427 if targets is None:
436 return self.targets
428 return self.targets
437 else:
429 else:
438 if not isinstance(targets, (str,list,tuple,int)):
430 if not isinstance(targets, (str,list,tuple,int)):
439 raise ValueError("targets must be a str, list, tuple or int")
431 raise ValueError("targets must be a str, list, tuple or int")
440 return targets
432 return targets
441
433
442 def _findTargetsAndBlock(self, targets=None, block=None):
434 def _findTargetsAndBlock(self, targets=None, block=None):
443 return self._findTargets(targets), self._findBlock(block)
435 return self._findTargets(targets), self._findBlock(block)
444
436
445 def _blockFromThread(self, function, *args, **kwargs):
437 def _blockFromThread(self, function, *args, **kwargs):
446 block = kwargs.get('block', None)
438 block = kwargs.get('block', None)
447 if block is None:
439 if block is None:
448 raise error.MissingBlockArgument("'block' keyword argument is missing")
440 raise error.MissingBlockArgument("'block' keyword argument is missing")
449 result = blockingCallFromThread(function, *args, **kwargs)
441 result = blockingCallFromThread(function, *args, **kwargs)
450 if not block:
442 if not block:
451 result = PendingResult(self, result)
443 result = PendingResult(self, result)
452 return result
444 return result
453
445
454 def get_pending_deferred(self, deferredID, block):
446 def get_pending_deferred(self, deferredID, block):
455 return blockingCallFromThread(self.smultiengine.get_pending_deferred, deferredID, block)
447 return blockingCallFromThread(self.smultiengine.get_pending_deferred, deferredID, block)
456
448
457 def barrier(self, pendingResults):
449 def barrier(self, pendingResults):
458 """Synchronize a set of `PendingResults`.
450 """Synchronize a set of `PendingResults`.
459
451
460 This method is a synchronization primitive that waits for a set of
452 This method is a synchronization primitive that waits for a set of
461 `PendingResult` objects to complete. More specifically, barrier does
453 `PendingResult` objects to complete. More specifically, barrier does
462 the following.
454 the following.
463
455
464 * The `PendingResult`s are sorted by result_id.
456 * The `PendingResult`s are sorted by result_id.
465 * The `get_result` method is called for each `PendingResult` sequentially
457 * The `get_result` method is called for each `PendingResult` sequentially
466 with block=True.
458 with block=True.
467 * If a `PendingResult` gets a result that is an exception, it is
459 * If a `PendingResult` gets a result that is an exception, it is
468 trapped and can be re-raised later by calling `get_result` again.
460 trapped and can be re-raised later by calling `get_result` again.
469 * The `PendingResult`s are flushed from the controller.
461 * The `PendingResult`s are flushed from the controller.
470
462
471 After barrier has been called on a `PendingResult`, its results can
463 After barrier has been called on a `PendingResult`, its results can
472 be retrieved by calling `get_result` again or accessing the `r` attribute
464 be retrieved by calling `get_result` again or accessing the `r` attribute
473 of the instance.
465 of the instance.
474 """
466 """
475
467
476 # Convert to list for sorting and check class type
468 # Convert to list for sorting and check class type
477 prList = list(pendingResults)
469 prList = list(pendingResults)
478 for pr in prList:
470 for pr in prList:
479 if not isinstance(pr, PendingResult):
471 if not isinstance(pr, PendingResult):
480 raise error.NotAPendingResult("Objects passed to barrier must be PendingResult instances")
472 raise error.NotAPendingResult("Objects passed to barrier must be PendingResult instances")
481
473
482 # Sort the PendingResults so they are in order
474 # Sort the PendingResults so they are in order
483 prList.sort()
475 prList.sort()
484 # Block on each PendingResult object
476 # Block on each PendingResult object
485 for pr in prList:
477 for pr in prList:
486 try:
478 try:
487 result = pr.get_result(block=True)
479 result = pr.get_result(block=True)
488 except Exception:
480 except Exception:
489 pass
481 pass
490
482
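For example, a handful of non-blocking submissions can be synchronized like this (a sketch; `mec` is an assumed client instance):

    prs = [mec.execute('import time; time.sleep(1)', block=False) for i in range(3)]
    mec.barrier(prs)                  # block until every PendingResult has completed
    results = [pr.r for pr in prs]    # cached results (or re-raised exceptions)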
491 def flush(self):
483 def flush(self):
492 """
484 """
493 Clear all pending deferreds/results from the controller.
485 Clear all pending deferreds/results from the controller.
494
486
495 For each `PendingResult` that is created by this client, the controller
487 For each `PendingResult` that is created by this client, the controller
496 holds on to the result for that `PendingResult`. This can be a problem
488 holds on to the result for that `PendingResult`. This can be a problem
497 if there are a large number of `PendingResult` objects that are created.
489 if there are a large number of `PendingResult` objects that are created.
498
490
499 Once the result of the `PendingResult` has been retrieved, the result
491 Once the result of the `PendingResult` has been retrieved, the result
500 is removed from the controller, but if a user doesn't get a result (
492 is removed from the controller, but if a user doesn't get a result (
501 they just ignore the `PendingResult`) the result is kept forever on the
493 they just ignore the `PendingResult`) the result is kept forever on the
502 controller. This method allows the user to clear out all un-retrieved
494 controller. This method allows the user to clear out all un-retrieved
503 results on the controller.
495 results on the controller.
504 """
496 """
505 r = blockingCallFromThread(self.smultiengine.clear_pending_deferreds)
497 r = blockingCallFromThread(self.smultiengine.clear_pending_deferreds)
506 return r
498 return r
507
499
508 clear_pending_results = flush
500 clear_pending_results = flush
509
501
510 #---------------------------------------------------------------------------
502 #---------------------------------------------------------------------------
511 # IEngineMultiplexer related methods
503 # IEngineMultiplexer related methods
512 #---------------------------------------------------------------------------
504 #---------------------------------------------------------------------------
513
505
514 def execute(self, lines, targets=None, block=None):
506 def execute(self, lines, targets=None, block=None):
515 """
507 """
516 Execute code on a set of engines.
508 Execute code on a set of engines.
517
509
518 :Parameters:
510 :Parameters:
519 lines : str
511 lines : str
520 The Python code to execute as a string
512 The Python code to execute as a string
521 targets : id or list of ids
513 targets : id or list of ids
522 The engine to use for the execution
514 The engine to use for the execution
523 block : boolean
515 block : boolean
524 If True, this method will return the actual result. If False,
516 If True, this method will return the actual result. If False,
525 a `PendingResult` is returned which can be used to get the result
517 a `PendingResult` is returned which can be used to get the result
526 at a later time.
518 at a later time.
527 """
519 """
528 targets, block = self._findTargetsAndBlock(targets, block)
520 targets, block = self._findTargetsAndBlock(targets, block)
529 result = blockingCallFromThread(self.smultiengine.execute, lines,
521 result = blockingCallFromThread(self.smultiengine.execute, lines,
530 targets=targets, block=block)
522 targets=targets, block=block)
531 if block:
523 if block:
532 result = ResultList(result)
524 result = ResultList(result)
533 else:
525 else:
534 result = PendingResult(self, result)
526 result = PendingResult(self, result)
535 result.add_callback(wrapResultList)
527 result.add_callback(wrapResultList)
536 return result
528 return result
537
529
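A blocking versus a non-blocking call to `execute` might look like this (illustrative; `mec` assumed):

    rl = mec.execute('b = 3 * 4')                 # block defaults to True: a ResultList comes back
    pr = mec.execute('c = b ** 2', block=False)   # a PendingResult comes back immediately
    print pr.r                                    # pretty-printed ResultList once the engines finish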
538 def push(self, namespace, targets=None, block=None):
530 def push(self, namespace, targets=None, block=None):
539 """
531 """
540 Push a dictionary of keys and values to the engines' namespaces.
532 Push a dictionary of keys and values to the engines' namespaces.
541
533
542 Each engine has a persistent namespace. This method is used to push
534 Each engine has a persistent namespace. This method is used to push
543 Python objects into that namespace.
535 Python objects into that namespace.
544
536
545 The objects in the namespace must be pickleable.
537 The objects in the namespace must be pickleable.
546
538
547 :Parameters:
539 :Parameters:
548 namespace : dict
540 namespace : dict
549 A dict that contains Python objects to be injected into
541 A dict that contains Python objects to be injected into
550 the engine persistent namespace.
542 the engine persistent namespace.
551 targets : id or list of ids
543 targets : id or list of ids
552 The engine to use for the execution
544 The engine to use for the execution
553 block : boolean
545 block : boolean
554 If True, this method will return the actual result. If False,
546 If True, this method will return the actual result. If False,
555 a `PendingResult` is returned which can be used to get the result
547 a `PendingResult` is returned which can be used to get the result
556 at a later time.
548 at a later time.
557 """
549 """
558 targets, block = self._findTargetsAndBlock(targets, block)
550 targets, block = self._findTargetsAndBlock(targets, block)
559 return self._blockFromThread(self.smultiengine.push, namespace,
551 return self._blockFromThread(self.smultiengine.push, namespace,
560 targets=targets, block=block)
552 targets=targets, block=block)
561
553
562 def pull(self, keys, targets=None, block=None):
554 def pull(self, keys, targets=None, block=None):
563 """
555 """
564 Pull Python objects by key out of the engines' namespaces.
556 Pull Python objects by key out of the engines' namespaces.
565
557
566 :Parameters:
558 :Parameters:
567 keys : str or list of str
559 keys : str or list of str
568 The names of the variables to be pulled
560 The names of the variables to be pulled
569 targets : id or list of ids
561 targets : id or list of ids
570 The engine to use for the execution
562 The engine to use for the execution
571 block : boolean
563 block : boolean
572 If True, this method will return the actual result. If False,
564 If True, this method will return the actual result. If False,
573 a `PendingResult` is returned which can be used to get the result
565 a `PendingResult` is returned which can be used to get the result
574 at a later time.
566 at a later time.
575 """
567 """
576 targets, block = self._findTargetsAndBlock(targets, block)
568 targets, block = self._findTargetsAndBlock(targets, block)
577 return self._blockFromThread(self.smultiengine.pull, keys, targets=targets, block=block)
569 return self._blockFromThread(self.smultiengine.pull, keys, targets=targets, block=block)
578
570
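A typical push/pull round trip, sketched under the assumption of a client `mec` with a few engines registered:

    mec.push({'a': 1, 'b': [1, 2, 3]})     # inject objects into every engine's namespace
    print mec.pull('a')                    # one value per engine, e.g. [1, 1, 1, 1]
    print mec.pull(['a', 'b'], targets=0)  # restrict the pull to engine 0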
579 def push_function(self, namespace, targets=None, block=None):
571 def push_function(self, namespace, targets=None, block=None):
580 """
572 """
581 Push a Python function to an engine.
573 Push a Python function to an engine.
582
574
583 This method is used to push a Python function to an engine. This
575 This method is used to push a Python function to an engine. This
584 method can then be used in code on the engines. Closures are not supported.
576 method can then be used in code on the engines. Closures are not supported.
585
577
586 :Parameters:
578 :Parameters:
587 namespace : dict
579 namespace : dict
588 A dict whose values are the functions to be pushed. The keys give
580 A dict whose values are the functions to be pushed. The keys give
589 the names that the functions will appear as in the engines'
581 the names that the functions will appear as in the engines'
590 namespace.
582 namespace.
591 targets : id or list of ids
583 targets : id or list of ids
592 The engine to use for the execution
584 The engine to use for the execution
593 block : boolean
585 block : boolean
594 If True, this method will return the actual result. If False,
586 If True, this method will return the actual result. If False,
595 a `PendingResult` is returned which can be used to get the result
587 a `PendingResult` is returned which can be used to get the result
596 at a later time.
588 at a later time.
597 """
589 """
598 targets, block = self._findTargetsAndBlock(targets, block)
590 targets, block = self._findTargetsAndBlock(targets, block)
599 return self._blockFromThread(self.smultiengine.push_function, namespace, targets=targets, block=block)
591 return self._blockFromThread(self.smultiengine.push_function, namespace, targets=targets, block=block)
600
592
601 def pull_function(self, keys, targets=None, block=None):
593 def pull_function(self, keys, targets=None, block=None):
602 """
594 """
603 Pull a Python function from an engine.
595 Pull a Python function from an engine.
604
596
605 This method is used to pull a Python function from an engine.
597 This method is used to pull a Python function from an engine.
606 Closures are not supported.
598 Closures are not supported.
607
599
608 :Parameters:
600 :Parameters:
609 keys : str or list of str
601 keys : str or list of str
610 The names of the functions to be pulled
602 The names of the functions to be pulled
611 targets : id or list of ids
603 targets : id or list of ids
612 The engine to use for the execution
604 The engine to use for the execution
613 block : boolean
605 block : boolean
614 If True, this method will return the actual result. If False,
606 If True, this method will return the actual result. If False,
615 a `PendingResult` is returned which can be used to get the result
607 a `PendingResult` is returned which can be used to get the result
616 at a later time.
608 at a later time.
617 """
609 """
618 targets, block = self._findTargetsAndBlock(targets, block)
610 targets, block = self._findTargetsAndBlock(targets, block)
619 return self._blockFromThread(self.smultiengine.pull_function, keys, targets=targets, block=block)
611 return self._blockFromThread(self.smultiengine.pull_function, keys, targets=targets, block=block)
620
612
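Pushing a closure-free function and then calling it from remotely executed code could look like this sketch (`mec` assumed):

    def double(x):
        return 2 * x

    mec.push_function(dict(double=double))   # `double` now exists on the engines
    mec.execute('y = double(21)')
    print mec.pull('y')                      # e.g. [42, 42, ...], one value per engine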
621 def push_serialized(self, namespace, targets=None, block=None):
613 def push_serialized(self, namespace, targets=None, block=None):
622 targets, block = self._findTargetsAndBlock(targets, block)
614 targets, block = self._findTargetsAndBlock(targets, block)
623 return self._blockFromThread(self.smultiengine.push_serialized, namespace, targets=targets, block=block)
615 return self._blockFromThread(self.smultiengine.push_serialized, namespace, targets=targets, block=block)
624
616
625 def pull_serialized(self, keys, targets=None, block=None):
617 def pull_serialized(self, keys, targets=None, block=None):
626 targets, block = self._findTargetsAndBlock(targets, block)
618 targets, block = self._findTargetsAndBlock(targets, block)
627 return self._blockFromThread(self.smultiengine.pull_serialized, keys, targets=targets, block=block)
619 return self._blockFromThread(self.smultiengine.pull_serialized, keys, targets=targets, block=block)
628
620
629 def get_result(self, i=None, targets=None, block=None):
621 def get_result(self, i=None, targets=None, block=None):
630 """
622 """
631 Get a previous result.
623 Get a previous result.
632
624
633 When code is executed in an engine, a dict is created and returned. This
625 When code is executed in an engine, a dict is created and returned. This
634 method retrieves that dict for previous commands.
626 method retrieves that dict for previous commands.
635
627
636 :Parameters:
628 :Parameters:
637 i : int
629 i : int
638 The number of the result to get
630 The number of the result to get
639 targets : id or list of ids
631 targets : id or list of ids
640 The engine to use for the execution
632 The engine to use for the execution
641 block : boolean
633 block : boolean
642 If True, this method will return the actual result. If False,
634 If True, this method will return the actual result. If False,
643 a `PendingResult` is returned which can be used to get the result
635 a `PendingResult` is returned which can be used to get the result
644 at a later time.
636 at a later time.
645 """
637 """
646 targets, block = self._findTargetsAndBlock(targets, block)
638 targets, block = self._findTargetsAndBlock(targets, block)
647 result = blockingCallFromThread(self.smultiengine.get_result, i, targets=targets, block=block)
639 result = blockingCallFromThread(self.smultiengine.get_result, i, targets=targets, block=block)
648 if block:
640 if block:
649 result = ResultList(result)
641 result = ResultList(result)
650 else:
642 else:
651 result = PendingResult(self, result)
643 result = PendingResult(self, result)
652 result.add_callback(wrapResultList)
644 result.add_callback(wrapResultList)
653 return result
645 return result
654
646
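A quick sketch of retrieving an earlier execution record (`mec` assumed; the argument to `get_result` is illustrative):

    mec.execute('z = 1 + 1')
    print mec.get_result()      # ResultList for the most recent command on each engine
    print mec.get_result(1)     # illustrative: the record for command number 1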
655 def reset(self, targets=None, block=None):
647 def reset(self, targets=None, block=None):
656 """
648 """
657 Reset an engine.
649 Reset an engine.
658
650
659 This method clears out the namespace of an engine.
651 This method clears out the namespace of an engine.
660
652
661 :Parameters:
653 :Parameters:
662 targets : id or list of ids
654 targets : id or list of ids
663 The engine to use for the execution
655 The engine to use for the execution
664 block : boolean
656 block : boolean
665 If True, this method will return the actual result. If False,
657 If True, this method will return the actual result. If False,
666 a `PendingResult` is returned which can be used to get the result
658 a `PendingResult` is returned which can be used to get the result
667 at a later time.
659 at a later time.
668 """
660 """
669 targets, block = self._findTargetsAndBlock(targets, block)
661 targets, block = self._findTargetsAndBlock(targets, block)
670 return self._blockFromThread(self.smultiengine.reset, targets=targets, block=block)
662 return self._blockFromThread(self.smultiengine.reset, targets=targets, block=block)
671
663
672 def keys(self, targets=None, block=None):
664 def keys(self, targets=None, block=None):
673 """
665 """
674 Get a list of all the variables in an engine's namespace.
666 Get a list of all the variables in an engine's namespace.
675
667
676 :Parameters:
668 :Parameters:
677 targets : id or list of ids
669 targets : id or list of ids
678 The engine to use for the execution
670 The engine to use for the execution
679 block : boolean
671 block : boolean
680 If True, this method will return the actual result. If False,
672 If True, this method will return the actual result. If False,
681 a `PendingResult` is returned which can be used to get the result
673 a `PendingResult` is returned which can be used to get the result
682 at a later time.
674 at a later time.
683 """
675 """
684 targets, block = self._findTargetsAndBlock(targets, block)
676 targets, block = self._findTargetsAndBlock(targets, block)
685 return self._blockFromThread(self.smultiengine.keys, targets=targets, block=block)
677 return self._blockFromThread(self.smultiengine.keys, targets=targets, block=block)
686
678
687 def kill(self, controller=False, targets=None, block=None):
679 def kill(self, controller=False, targets=None, block=None):
688 """
680 """
689 Kill the engines and controller.
681 Kill the engines and controller.
690
682
691 This method is used to stop the engines and controller by calling
683 This method is used to stop the engines and controller by calling
692 `reactor.stop`.
684 `reactor.stop`.
693
685
694 :Parameters:
686 :Parameters:
695 controller : boolean
687 controller : boolean
696 If True, kill the engines and controller. If False, just the
688 If True, kill the engines and controller. If False, just the
697 engines
689 engines
698 targets : id or list of ids
690 targets : id or list of ids
699 The engine to use for the execution
691 The engine to use for the execution
700 block : boolean
692 block : boolean
701 If True, this method will return the actual result. If False,
693 If True, this method will return the actual result. If False,
702 a `PendingResult` is returned which can be used to get the result
694 a `PendingResult` is returned which can be used to get the result
703 at a later time.
695 at a later time.
704 """
696 """
705 targets, block = self._findTargetsAndBlock(targets, block)
697 targets, block = self._findTargetsAndBlock(targets, block)
706 return self._blockFromThread(self.smultiengine.kill, controller, targets=targets, block=block)
698 return self._blockFromThread(self.smultiengine.kill, controller, targets=targets, block=block)
707
699
708 def clear_queue(self, targets=None, block=None):
700 def clear_queue(self, targets=None, block=None):
709 """
701 """
710 Clear out the controller's queue for an engine.
702 Clear out the controller's queue for an engine.
711
703
712 The controller maintains a queue for each engine. This clears it out.
704 The controller maintains a queue for each engine. This clears it out.
713
705
714 :Parameters:
706 :Parameters:
715 targets : id or list of ids
707 targets : id or list of ids
716 The engine to use for the execution
708 The engine to use for the execution
717 block : boolean
709 block : boolean
718 If True, this method will return the actual result. If False,
710 If True, this method will return the actual result. If False,
719 a `PendingResult` is returned which can be used to get the result
711 a `PendingResult` is returned which can be used to get the result
720 at a later time.
712 at a later time.
721 """
713 """
722 targets, block = self._findTargetsAndBlock(targets, block)
714 targets, block = self._findTargetsAndBlock(targets, block)
723 return self._blockFromThread(self.smultiengine.clear_queue, targets=targets, block=block)
715 return self._blockFromThread(self.smultiengine.clear_queue, targets=targets, block=block)
724
716
725 def queue_status(self, targets=None, block=None):
717 def queue_status(self, targets=None, block=None):
726 """
718 """
727 Get the status of an engine's queue.
719 Get the status of an engine's queue.
728
720
729 :Parameters:
721 :Parameters:
730 targets : id or list of ids
722 targets : id or list of ids
731 The engine to use for the execution
723 The engine to use for the execution
732 block : boolean
724 block : boolean
733 If True, this method will return the actual result. If False,
725 If True, this method will return the actual result. If False,
734 a `PendingResult` is returned which can be used to get the result
726 a `PendingResult` is returned which can be used to get the result
735 at a later time.
727 at a later time.
736 """
728 """
737 targets, block = self._findTargetsAndBlock(targets, block)
729 targets, block = self._findTargetsAndBlock(targets, block)
738 return self._blockFromThread(self.smultiengine.queue_status, targets=targets, block=block)
730 return self._blockFromThread(self.smultiengine.queue_status, targets=targets, block=block)
739
731
740 def set_properties(self, properties, targets=None, block=None):
732 def set_properties(self, properties, targets=None, block=None):
741 targets, block = self._findTargetsAndBlock(targets, block)
733 targets, block = self._findTargetsAndBlock(targets, block)
742 return self._blockFromThread(self.smultiengine.set_properties, properties, targets=targets, block=block)
734 return self._blockFromThread(self.smultiengine.set_properties, properties, targets=targets, block=block)
743
735
744 def get_properties(self, keys=None, targets=None, block=None):
736 def get_properties(self, keys=None, targets=None, block=None):
745 targets, block = self._findTargetsAndBlock(targets, block)
737 targets, block = self._findTargetsAndBlock(targets, block)
746 return self._blockFromThread(self.smultiengine.get_properties, keys, targets=targets, block=block)
738 return self._blockFromThread(self.smultiengine.get_properties, keys, targets=targets, block=block)
747
739
748 def has_properties(self, keys, targets=None, block=None):
740 def has_properties(self, keys, targets=None, block=None):
749 targets, block = self._findTargetsAndBlock(targets, block)
741 targets, block = self._findTargetsAndBlock(targets, block)
750 return self._blockFromThread(self.smultiengine.has_properties, keys, targets=targets, block=block)
742 return self._blockFromThread(self.smultiengine.has_properties, keys, targets=targets, block=block)
751
743
752 def del_properties(self, keys, targets=None, block=None):
744 def del_properties(self, keys, targets=None, block=None):
753 targets, block = self._findTargetsAndBlock(targets, block)
745 targets, block = self._findTargetsAndBlock(targets, block)
754 return self._blockFromThread(self.smultiengine.del_properties, keys, targets=targets, block=block)
746 return self._blockFromThread(self.smultiengine.del_properties, keys, targets=targets, block=block)
755
747
756 def clear_properties(self, targets=None, block=None):
748 def clear_properties(self, targets=None, block=None):
757 targets, block = self._findTargetsAndBlock(targets, block)
749 targets, block = self._findTargetsAndBlock(targets, block)
758 return self._blockFromThread(self.smultiengine.clear_properties, targets=targets, block=block)
750 return self._blockFromThread(self.smultiengine.clear_properties, targets=targets, block=block)
759
751
760 #---------------------------------------------------------------------------
752 #---------------------------------------------------------------------------
761 # IMultiEngine related methods
753 # IMultiEngine related methods
762 #---------------------------------------------------------------------------
754 #---------------------------------------------------------------------------
763
755
764 def get_ids(self):
756 def get_ids(self):
765 """
757 """
766 Returns the ids of currently registered engines.
758 Returns the ids of currently registered engines.
767 """
759 """
768 result = blockingCallFromThread(self.smultiengine.get_ids)
760 result = blockingCallFromThread(self.smultiengine.get_ids)
769 return result
761 return result
770
762
771 #---------------------------------------------------------------------------
763 #---------------------------------------------------------------------------
772 # IMultiEngineCoordinator
764 # IMultiEngineCoordinator
773 #---------------------------------------------------------------------------
765 #---------------------------------------------------------------------------
774
766
775 def scatter(self, key, seq, style='basic', flatten=False, targets=None, block=None):
767 def scatter(self, key, seq, dist='b', flatten=False, targets=None, block=None):
776 """
768 """
777 Partition a Python sequence and send the partitions to a set of engines.
769 Partition a Python sequence and send the partitions to a set of engines.
778 """
770 """
779 targets, block = self._findTargetsAndBlock(targets, block)
771 targets, block = self._findTargetsAndBlock(targets, block)
780 return self._blockFromThread(self.smultiengine.scatter, key, seq,
772 return self._blockFromThread(self.smultiengine.scatter, key, seq,
781 style, flatten, targets=targets, block=block)
773 dist, flatten, targets=targets, block=block)
782
774
783 def gather(self, key, style='basic', targets=None, block=None):
775 def gather(self, key, dist='b', targets=None, block=None):
784 """
776 """
785 Gather a partitioned sequence on a set of engines as a single local seq.
777 Gather a partitioned sequence on a set of engines as a single local seq.
786 """
778 """
787 targets, block = self._findTargetsAndBlock(targets, block)
779 targets, block = self._findTargetsAndBlock(targets, block)
788 return self._blockFromThread(self.smultiengine.gather, key, style,
780 return self._blockFromThread(self.smultiengine.gather, key, dist,
789 targets=targets, block=block)
781 targets=targets, block=block)
790
782
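A scatter/execute/gather round trip, sketched with an assumed client `mec`:

    mec.scatter('x', range(16))              # partition the sequence across the engines
    mec.execute('y = [i**2 for i in x]')     # each engine works on its own partition
    print mec.gather('y')                    # reassemble the partitions into one local list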
791 def map(self, func, seq, style='basic', targets=None, block=None):
783 def raw_map(self, func, seq, dist='b', targets=None, block=None):
792 """
784 """
793 A parallelized version of Python's builtin map
785 A parallelized version of Python's builtin map.
786
787 This has a slightly different syntax than the builtin `map`.
788 This is needed because we need to have keyword arguments and thus
789 can't use *args to capture all the sequences. Instead, they must
790 be passed in a list or tuple.
791
792 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
793
794 Most users will want to use parallel functions or the `mapper`
795 and `map` methods for an API that follows that of the builtin
796 `map`.
794 """
797 """
795 targets, block = self._findTargetsAndBlock(targets, block)
798 targets, block = self._findTargetsAndBlock(targets, block)
796 return self._blockFromThread(self.smultiengine.map, func, seq,
799 return self._blockFromThread(self.smultiengine.raw_map, func, seq,
797 style, targets=targets, block=block)
800 dist, targets=targets, block=block)
801
802 def map(self, func, *sequences):
803 """
804 A parallel version of Python's builtin `map` function.
805
806 This method applies a function to sequences of arguments. It
807 follows the same syntax as the builtin `map`.
808
809 This method creates a mapper object by calling `self.mapper` with
810 no arguments and then uses that mapper to do the mapping. See
811 the documentation of `mapper` for more details.
812 """
813 return self.mapper().map(func, *sequences)
814
815 def mapper(self, dist='b', targets='all', block=None):
816 """
817 Create a mapper object that has a `map` method.
818
819 This method returns an object that implements the `IMapper`
820 interface. This method is a factory that is used to control how
821 the map happens.
822
823 :Parameters:
824 dist : str
825 What decomposition to use; 'b' is the only one supported
826 currently
827 targets : str, int, sequence of ints
828 Which engines to use for the map
829 block : boolean
830 Should calls to `map` block or not
831 """
832 return MultiEngineMapper(self, dist, targets, block)
833
834 def parallel(self, dist='b', targets=None, block=None):
835 """
836 A decorator that turns a function into a parallel function.
837
838 This can be used as:
839
840 @parallel()
841 def f(x, y):
842 ...
843
844 f(range(10), range(10))
845
846 This causes f(0,0), f(1,1), ... to be called in parallel.
847
848 :Parameters:
849 dist : str
850 What decomposition to use; 'b' is the only one supported
851 currently
852 targets : str, int, sequence of ints
853 Which engines to use for the map
854 block : boolean
855 Should calls to `map` block or not
856 """
857 targets, block = self._findTargetsAndBlock(targets, block)
858 mapper = self.mapper(dist, targets, block)
859 pf = ParallelFunction(mapper)
860 return pf
798
861
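The three entry points added here (`map`, `mapper`, and the `parallel` decorator) might be exercised as follows; this is a sketch assuming a client `mec`, and the decorator form simply mirrors the docstring above:

    print mec.map(lambda x: x**2, range(10))        # builtin-style parallel map
    m = mec.mapper(block=True)                      # an explicit IMapper with fixed options
    print m.map(lambda a, b: a + b, range(5), range(5))   # multiple sequences, like builtin map

    @mec.parallel()
    def f(x):
        return 2 * x

    print f(range(10))                              # f(0), f(1), ... evaluated in parallel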
799 #---------------------------------------------------------------------------
862 #---------------------------------------------------------------------------
800 # IMultiEngineExtras
863 # IMultiEngineExtras
801 #---------------------------------------------------------------------------
864 #---------------------------------------------------------------------------
802
865
803 def zip_pull(self, keys, targets=None, block=None):
866 def zip_pull(self, keys, targets=None, block=None):
804 targets, block = self._findTargetsAndBlock(targets, block)
867 targets, block = self._findTargetsAndBlock(targets, block)
805 return self._blockFromThread(self.smultiengine.zip_pull, keys,
868 return self._blockFromThread(self.smultiengine.zip_pull, keys,
806 targets=targets, block=block)
869 targets=targets, block=block)
807
870
808 def run(self, filename, targets=None, block=None):
871 def run(self, filename, targets=None, block=None):
809 """
872 """
810 Run Python code from a file on the engines.
873 Run Python code from a file on the engines.
811
874
812 :Parameters:
875 :Parameters:
813 filename : str
876 filename : str
814 The name of the local file to run
877 The name of the local file to run
815 targets : id or list of ids
878 targets : id or list of ids
816 The engine to use for the execution
879 The engine to use for the execution
817 block : boolean
880 block : boolean
818 If True, this method will return the actual result. If False,
881 If True, this method will return the actual result. If False,
819 a `PendingResult` is returned which can be used to get the result
882 a `PendingResult` is returned which can be used to get the result
820 at a later time.
883 at a later time.
821 """
884 """
822 targets, block = self._findTargetsAndBlock(targets, block)
885 targets, block = self._findTargetsAndBlock(targets, block)
823 return self._blockFromThread(self.smultiengine.run, filename,
886 return self._blockFromThread(self.smultiengine.run, filename,
824 targets=targets, block=block)
887 targets=targets, block=block)
825
888
826
889
827
890
828 components.registerAdapter(FullBlockingMultiEngineClient,
891 components.registerAdapter(FullBlockingMultiEngineClient,
829 IFullSynchronousMultiEngine, IFullBlockingMultiEngineClient)
892 IFullSynchronousMultiEngine, IFullBlockingMultiEngineClient)
830
893
831
894
832
895
833
896
@@ -1,668 +1,757 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """
3 """
4 Expose the multiengine controller over the Foolscap network protocol.
4 Expose the multiengine controller over the Foolscap network protocol.
5 """
5 """
6
6
7 __docformat__ = "restructuredtext en"
7 __docformat__ = "restructuredtext en"
8
8
9 #-------------------------------------------------------------------------------
9 #-------------------------------------------------------------------------------
10 # Copyright (C) 2008 The IPython Development Team
10 # Copyright (C) 2008 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15
15
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17 # Imports
17 # Imports
18 #-------------------------------------------------------------------------------
18 #-------------------------------------------------------------------------------
19
19
20 import cPickle as pickle
20 import cPickle as pickle
21 from types import FunctionType
21 from types import FunctionType
22
22
23 from zope.interface import Interface, implements
23 from zope.interface import Interface, implements
24 from twisted.internet import defer
24 from twisted.internet import defer
25 from twisted.python import components, failure, log
25 from twisted.python import components, failure, log
26
26
27 from foolscap import Referenceable
27 from foolscap import Referenceable
28
28
29 from IPython.kernel import error
29 from IPython.kernel import error
30 from IPython.kernel.util import printer
30 from IPython.kernel.util import printer
31 from IPython.kernel import map as Map
31 from IPython.kernel import map as Map
32 from IPython.kernel.parallelfunction import ParallelFunction
33 from IPython.kernel.mapper import (
34 MultiEngineMapper,
35 IMultiEngineMapperFactory,
36 IMapper
37 )
32 from IPython.kernel.twistedutil import gatherBoth
38 from IPython.kernel.twistedutil import gatherBoth
33 from IPython.kernel.multiengine import (MultiEngine,
39 from IPython.kernel.multiengine import (MultiEngine,
34 IMultiEngine,
40 IMultiEngine,
35 IFullSynchronousMultiEngine,
41 IFullSynchronousMultiEngine,
36 ISynchronousMultiEngine)
42 ISynchronousMultiEngine)
37 from IPython.kernel.multiengineclient import wrapResultList
43 from IPython.kernel.multiengineclient import wrapResultList
38 from IPython.kernel.pendingdeferred import PendingDeferredManager
44 from IPython.kernel.pendingdeferred import PendingDeferredManager
39 from IPython.kernel.pickleutil import (can, canDict,
45 from IPython.kernel.pickleutil import (can, canDict,
40 canSequence, uncan, uncanDict, uncanSequence)
46 canSequence, uncan, uncanDict, uncanSequence)
41
47
42 from IPython.kernel.clientinterfaces import (
48 from IPython.kernel.clientinterfaces import (
43 IFCClientInterfaceProvider,
49 IFCClientInterfaceProvider,
44 IBlockingClientAdaptor
50 IBlockingClientAdaptor
45 )
51 )
46
52
47 # Needed to access the true globals from __main__.__dict__
53 # Needed to access the true globals from __main__.__dict__
48 import __main__
54 import __main__
49
55
50 #-------------------------------------------------------------------------------
56 #-------------------------------------------------------------------------------
51 # The Controller side of things
57 # The Controller side of things
52 #-------------------------------------------------------------------------------
58 #-------------------------------------------------------------------------------
53
59
54 def packageResult(wrappedMethod):
60 def packageResult(wrappedMethod):
55
61
56 def wrappedPackageResult(self, *args, **kwargs):
62 def wrappedPackageResult(self, *args, **kwargs):
57 d = wrappedMethod(self, *args, **kwargs)
63 d = wrappedMethod(self, *args, **kwargs)
58 d.addCallback(self.packageSuccess)
64 d.addCallback(self.packageSuccess)
59 d.addErrback(self.packageFailure)
65 d.addErrback(self.packageFailure)
60 return d
66 return d
61 return wrappedPackageResult
67 return wrappedPackageResult
62
68
63
69
64 class IFCSynchronousMultiEngine(Interface):
70 class IFCSynchronousMultiEngine(Interface):
65 """Foolscap interface to `ISynchronousMultiEngine`.
71 """Foolscap interface to `ISynchronousMultiEngine`.
66
72
67 The methods in this interface are similar to those of
73 The methods in this interface are similar to those of
68 `ISynchronousMultiEngine`, but their arguments and return values are pickled
74 `ISynchronousMultiEngine`, but their arguments and return values are pickled
69 if they are not already simple Python types that can be sent over XML-RPC.
75 if they are not already simple Python types that can be sent over XML-RPC.
70
76
71 See the documentation of `ISynchronousMultiEngine` and `IMultiEngine` for
77 See the documentation of `ISynchronousMultiEngine` and `IMultiEngine` for
72 documentation about the methods.
78 documentation about the methods.
73
79
74 Most methods in this interface act like the `ISynchronousMultiEngine`
80 Most methods in this interface act like the `ISynchronousMultiEngine`
75 versions and can be called in blocking or non-blocking mode.
81 versions and can be called in blocking or non-blocking mode.
76 """
82 """
77 pass
83 pass
78
84
79
85
80 class FCSynchronousMultiEngineFromMultiEngine(Referenceable):
86 class FCSynchronousMultiEngineFromMultiEngine(Referenceable):
81 """Adapt `IMultiEngine` -> `ISynchronousMultiEngine` -> `IFCSynchronousMultiEngine`.
87 """Adapt `IMultiEngine` -> `ISynchronousMultiEngine` -> `IFCSynchronousMultiEngine`.
82 """
88 """
83
89
84 implements(IFCSynchronousMultiEngine, IFCClientInterfaceProvider)
90 implements(IFCSynchronousMultiEngine, IFCClientInterfaceProvider)
85
91
86 addSlash = True
92 addSlash = True
87
93
88 def __init__(self, multiengine):
94 def __init__(self, multiengine):
89 # Adapt the raw multiengine to `ISynchronousMultiEngine` before saving
95 # Adapt the raw multiengine to `ISynchronousMultiEngine` before saving
90 # it. This allows this class to do two adaptation steps.
96 # it. This allows this class to do two adaptation steps.
91 self.smultiengine = ISynchronousMultiEngine(multiengine)
97 self.smultiengine = ISynchronousMultiEngine(multiengine)
92 self._deferredIDCallbacks = {}
98 self._deferredIDCallbacks = {}
93
99
94 #---------------------------------------------------------------------------
100 #---------------------------------------------------------------------------
95 # Non interface methods
101 # Non interface methods
96 #---------------------------------------------------------------------------
102 #---------------------------------------------------------------------------
97
103
98 def packageFailure(self, f):
104 def packageFailure(self, f):
99 f.cleanFailure()
105 f.cleanFailure()
100 return self.packageSuccess(f)
106 return self.packageSuccess(f)
101
107
102 def packageSuccess(self, obj):
108 def packageSuccess(self, obj):
103 serial = pickle.dumps(obj, 2)
109 serial = pickle.dumps(obj, 2)
104 return serial
110 return serial
105
111
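Every `@packageResult`-decorated method above returns its payload as a pickle (protocol 2). As a minimal illustrative sketch (not part of the patch itself), the round trip between `packageSuccess` on the controller and `unpackage` on the client amounts to:

    import cPickle as pickle

    def package(obj):
        # controller side, mirroring packageSuccess above
        return pickle.dumps(obj, 2)

    def unpackage(serial):
        # client side, mirroring FCFullSynchronousMultiEngineClient.unpackage below
        return pickle.loads(serial)

    payload = {'stdout': 'hello', 'number': 42}
    assert unpackage(package(payload)) == payload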
106 #---------------------------------------------------------------------------
112 #---------------------------------------------------------------------------
107 # Things related to PendingDeferredManager
113 # Things related to PendingDeferredManager
108 #---------------------------------------------------------------------------
114 #---------------------------------------------------------------------------
109
115
110 @packageResult
116 @packageResult
111 def remote_get_pending_deferred(self, deferredID, block):
117 def remote_get_pending_deferred(self, deferredID, block):
112 d = self.smultiengine.get_pending_deferred(deferredID, block)
118 d = self.smultiengine.get_pending_deferred(deferredID, block)
113 try:
119 try:
114 callback = self._deferredIDCallbacks.pop(deferredID)
120 callback = self._deferredIDCallbacks.pop(deferredID)
115 except KeyError:
121 except KeyError:
116 callback = None
122 callback = None
117 if callback is not None:
123 if callback is not None:
118 d.addCallback(callback[0], *callback[1], **callback[2])
124 d.addCallback(callback[0], *callback[1], **callback[2])
119 return d
125 return d
120
126
121 @packageResult
127 @packageResult
122 def remote_clear_pending_deferreds(self):
128 def remote_clear_pending_deferreds(self):
123 return defer.maybeDeferred(self.smultiengine.clear_pending_deferreds)
129 return defer.maybeDeferred(self.smultiengine.clear_pending_deferreds)
124
130
125 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
131 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
126 self._deferredIDCallbacks[did] = (callback, args, kwargs)
132 self._deferredIDCallbacks[did] = (callback, args, kwargs)
127 return did
133 return did
128
134
129 #---------------------------------------------------------------------------
135 #---------------------------------------------------------------------------
130 # IEngineMultiplexer related methods
136 # IEngineMultiplexer related methods
131 #---------------------------------------------------------------------------
137 #---------------------------------------------------------------------------
132
138
133 @packageResult
139 @packageResult
134 def remote_execute(self, lines, targets, block):
140 def remote_execute(self, lines, targets, block):
135 return self.smultiengine.execute(lines, targets=targets, block=block)
141 return self.smultiengine.execute(lines, targets=targets, block=block)
136
142
137 @packageResult
143 @packageResult
138 def remote_push(self, binaryNS, targets, block):
144 def remote_push(self, binaryNS, targets, block):
139 try:
145 try:
140 namespace = pickle.loads(binaryNS)
146 namespace = pickle.loads(binaryNS)
141 except:
147 except:
142 d = defer.fail(failure.Failure())
148 d = defer.fail(failure.Failure())
143 else:
149 else:
144 d = self.smultiengine.push(namespace, targets=targets, block=block)
150 d = self.smultiengine.push(namespace, targets=targets, block=block)
145 return d
151 return d
146
152
147 @packageResult
153 @packageResult
148 def remote_pull(self, keys, targets, block):
154 def remote_pull(self, keys, targets, block):
149 d = self.smultiengine.pull(keys, targets=targets, block=block)
155 d = self.smultiengine.pull(keys, targets=targets, block=block)
150 return d
156 return d
151
157
152 @packageResult
158 @packageResult
153 def remote_push_function(self, binaryNS, targets, block):
159 def remote_push_function(self, binaryNS, targets, block):
154 try:
160 try:
155 namespace = pickle.loads(binaryNS)
161 namespace = pickle.loads(binaryNS)
156 except:
162 except:
157 d = defer.fail(failure.Failure())
163 d = defer.fail(failure.Failure())
158 else:
164 else:
159 namespace = uncanDict(namespace)
165 namespace = uncanDict(namespace)
160 d = self.smultiengine.push_function(namespace, targets=targets, block=block)
166 d = self.smultiengine.push_function(namespace, targets=targets, block=block)
161 return d
167 return d
162
168
163 def _canMultipleKeys(self, result):
169 def _canMultipleKeys(self, result):
164 return [canSequence(r) for r in result]
170 return [canSequence(r) for r in result]
165
171
166 @packageResult
172 @packageResult
167 def remote_pull_function(self, keys, targets, block):
173 def remote_pull_function(self, keys, targets, block):
168 def can_functions(r, keys):
174 def can_functions(r, keys):
169 if len(keys)==1 or isinstance(keys, str):
175 if len(keys)==1 or isinstance(keys, str):
170 result = canSequence(r)
176 result = canSequence(r)
171 elif len(keys)>1:
177 elif len(keys)>1:
172 result = [canSequence(s) for s in r]
178 result = [canSequence(s) for s in r]
173 return result
179 return result
174 d = self.smultiengine.pull_function(keys, targets=targets, block=block)
180 d = self.smultiengine.pull_function(keys, targets=targets, block=block)
175 if block:
181 if block:
176 d.addCallback(can_functions, keys)
182 d.addCallback(can_functions, keys)
177 else:
183 else:
178 d.addCallback(lambda did: self._addDeferredIDCallback(did, can_functions, keys))
184 d.addCallback(lambda did: self._addDeferredIDCallback(did, can_functions, keys))
179 return d
185 return d
180
186
181 @packageResult
187 @packageResult
182 def remote_push_serialized(self, binaryNS, targets, block):
188 def remote_push_serialized(self, binaryNS, targets, block):
183 try:
189 try:
184 namespace = pickle.loads(binaryNS)
190 namespace = pickle.loads(binaryNS)
185 except:
191 except:
186 d = defer.fail(failure.Failure())
192 d = defer.fail(failure.Failure())
187 else:
193 else:
188 d = self.smultiengine.push_serialized(namespace, targets=targets, block=block)
194 d = self.smultiengine.push_serialized(namespace, targets=targets, block=block)
189 return d
195 return d
190
196
191 @packageResult
197 @packageResult
192 def remote_pull_serialized(self, keys, targets, block):
198 def remote_pull_serialized(self, keys, targets, block):
193 d = self.smultiengine.pull_serialized(keys, targets=targets, block=block)
199 d = self.smultiengine.pull_serialized(keys, targets=targets, block=block)
194 return d
200 return d
195
201
196 @packageResult
202 @packageResult
197 def remote_get_result(self, i, targets, block):
203 def remote_get_result(self, i, targets, block):
198 if i == 'None':
204 if i == 'None':
199 i = None
205 i = None
200 return self.smultiengine.get_result(i, targets=targets, block=block)
206 return self.smultiengine.get_result(i, targets=targets, block=block)
201
207
202 @packageResult
208 @packageResult
203 def remote_reset(self, targets, block):
209 def remote_reset(self, targets, block):
204 return self.smultiengine.reset(targets=targets, block=block)
210 return self.smultiengine.reset(targets=targets, block=block)
205
211
206 @packageResult
212 @packageResult
207 def remote_keys(self, targets, block):
213 def remote_keys(self, targets, block):
208 return self.smultiengine.keys(targets=targets, block=block)
214 return self.smultiengine.keys(targets=targets, block=block)
209
215
210 @packageResult
216 @packageResult
211 def remote_kill(self, controller, targets, block):
217 def remote_kill(self, controller, targets, block):
212 return self.smultiengine.kill(controller, targets=targets, block=block)
218 return self.smultiengine.kill(controller, targets=targets, block=block)
213
219
214 @packageResult
220 @packageResult
215 def remote_clear_queue(self, targets, block):
221 def remote_clear_queue(self, targets, block):
216 return self.smultiengine.clear_queue(targets=targets, block=block)
222 return self.smultiengine.clear_queue(targets=targets, block=block)
217
223
218 @packageResult
224 @packageResult
219 def remote_queue_status(self, targets, block):
225 def remote_queue_status(self, targets, block):
220 return self.smultiengine.queue_status(targets=targets, block=block)
226 return self.smultiengine.queue_status(targets=targets, block=block)
221
227
222 @packageResult
228 @packageResult
223 def remote_set_properties(self, binaryNS, targets, block):
229 def remote_set_properties(self, binaryNS, targets, block):
224 try:
230 try:
225 ns = pickle.loads(binaryNS)
231 ns = pickle.loads(binaryNS)
226 except:
232 except:
227 d = defer.fail(failure.Failure())
233 d = defer.fail(failure.Failure())
228 else:
234 else:
229 d = self.smultiengine.set_properties(ns, targets=targets, block=block)
235 d = self.smultiengine.set_properties(ns, targets=targets, block=block)
230 return d
236 return d
231
237
232 @packageResult
238 @packageResult
233 def remote_get_properties(self, keys, targets, block):
239 def remote_get_properties(self, keys, targets, block):
234 if keys=='None':
240 if keys=='None':
235 keys=None
241 keys=None
236 return self.smultiengine.get_properties(keys, targets=targets, block=block)
242 return self.smultiengine.get_properties(keys, targets=targets, block=block)
237
243
238 @packageResult
244 @packageResult
239 def remote_has_properties(self, keys, targets, block):
245 def remote_has_properties(self, keys, targets, block):
240 return self.smultiengine.has_properties(keys, targets=targets, block=block)
246 return self.smultiengine.has_properties(keys, targets=targets, block=block)
241
247
242 @packageResult
248 @packageResult
243 def remote_del_properties(self, keys, targets, block):
249 def remote_del_properties(self, keys, targets, block):
244 return self.smultiengine.del_properties(keys, targets=targets, block=block)
250 return self.smultiengine.del_properties(keys, targets=targets, block=block)
245
251
246 @packageResult
252 @packageResult
247 def remote_clear_properties(self, targets, block):
253 def remote_clear_properties(self, targets, block):
248 return self.smultiengine.clear_properties(targets=targets, block=block)
254 return self.smultiengine.clear_properties(targets=targets, block=block)
249
255
250 #---------------------------------------------------------------------------
256 #---------------------------------------------------------------------------
251 # IMultiEngine related methods
257 # IMultiEngine related methods
252 #---------------------------------------------------------------------------
258 #---------------------------------------------------------------------------
253
259
254 def remote_get_ids(self):
260 def remote_get_ids(self):
255 """Get the ids of the registered engines.
261 """Get the ids of the registered engines.
256
262
257 This method always blocks.
263 This method always blocks.
258 """
264 """
259 return self.smultiengine.get_ids()
265 return self.smultiengine.get_ids()
260
266
261 #---------------------------------------------------------------------------
267 #---------------------------------------------------------------------------
262 # IFCClientInterfaceProvider related methods
268 # IFCClientInterfaceProvider related methods
263 #---------------------------------------------------------------------------
269 #---------------------------------------------------------------------------
264
270
265 def remote_get_client_name(self):
271 def remote_get_client_name(self):
266 return 'IPython.kernel.multienginefc.FCFullSynchronousMultiEngineClient'
272 return 'IPython.kernel.multienginefc.FCFullSynchronousMultiEngineClient'
267
273
268
274
269 # The __init__ method of `FCSynchronousMultiEngineFromMultiEngine` first adapts the
275 # The __init__ method of `FCSynchronousMultiEngineFromMultiEngine` first adapts the
270 # `IMultiEngine` to `ISynchronousMultiEngine` so this is actually doing a
276 # `IMultiEngine` to `ISynchronousMultiEngine` so this is actually doing a
271 # two-phase adaptation.
277 # two-phase adaptation.
272 components.registerAdapter(FCSynchronousMultiEngineFromMultiEngine,
278 components.registerAdapter(FCSynchronousMultiEngineFromMultiEngine,
273 IMultiEngine, IFCSynchronousMultiEngine)
279 IMultiEngine, IFCSynchronousMultiEngine)
274
280
275
281
276 #-------------------------------------------------------------------------------
282 #-------------------------------------------------------------------------------
277 # The Client side of things
283 # The Client side of things
278 #-------------------------------------------------------------------------------
284 #-------------------------------------------------------------------------------
279
285
280
286
281 class FCFullSynchronousMultiEngineClient(object):
287 class FCFullSynchronousMultiEngineClient(object):
282
288
283 implements(IFullSynchronousMultiEngine, IBlockingClientAdaptor)
289 implements(
290 IFullSynchronousMultiEngine,
291 IBlockingClientAdaptor,
292 IMultiEngineMapperFactory,
293 IMapper
294 )
284
295
285 def __init__(self, remote_reference):
296 def __init__(self, remote_reference):
286 self.remote_reference = remote_reference
297 self.remote_reference = remote_reference
287 self._deferredIDCallbacks = {}
298 self._deferredIDCallbacks = {}
288 # This class manages some pending deferreds through this instance. This
299 # This class manages some pending deferreds through this instance. This
289 # is required for methods like gather/scatter as it enables us to
300 # is required for methods like gather/scatter as it enables us to
290 # create our own pending deferreds for composite operations.
301 # create our own pending deferreds for composite operations.
291 self.pdm = PendingDeferredManager()
302 self.pdm = PendingDeferredManager()
292
303
293 #---------------------------------------------------------------------------
304 #---------------------------------------------------------------------------
294 # Non interface methods
305 # Non interface methods
295 #---------------------------------------------------------------------------
306 #---------------------------------------------------------------------------
296
307
297 def unpackage(self, r):
308 def unpackage(self, r):
298 return pickle.loads(r)
309 return pickle.loads(r)
299
310
300 #---------------------------------------------------------------------------
311 #---------------------------------------------------------------------------
301 # Things related to PendingDeferredManager
312 # Things related to PendingDeferredManager
302 #---------------------------------------------------------------------------
313 #---------------------------------------------------------------------------
303
314
304 def get_pending_deferred(self, deferredID, block=True):
315 def get_pending_deferred(self, deferredID, block=True):
305
316
306 # Because we are managing some pending deferreds locally (through
317 # Because we are managing some pending deferreds locally (through
307 # self.pdm) and some remotely (on the controller), we first try the
318 # self.pdm) and some remotely (on the controller), we first try the
308 # local one and then the remote one.
319 # local one and then the remote one.
309 if self.pdm.quick_has_id(deferredID):
320 if self.pdm.quick_has_id(deferredID):
310 d = self.pdm.get_pending_deferred(deferredID, block)
321 d = self.pdm.get_pending_deferred(deferredID, block)
311 return d
322 return d
312 else:
323 else:
313 d = self.remote_reference.callRemote('get_pending_deferred', deferredID, block)
324 d = self.remote_reference.callRemote('get_pending_deferred', deferredID, block)
314 d.addCallback(self.unpackage)
325 d.addCallback(self.unpackage)
315 try:
326 try:
316 callback = self._deferredIDCallbacks.pop(deferredID)
327 callback = self._deferredIDCallbacks.pop(deferredID)
317 except KeyError:
328 except KeyError:
318 callback = None
329 callback = None
319 if callback is not None:
330 if callback is not None:
320 d.addCallback(callback[0], *callback[1], **callback[2])
331 d.addCallback(callback[0], *callback[1], **callback[2])
321 return d
332 return d
322
333
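The comment above describes the split between locally and remotely managed pending deferreds. As a hedged usage sketch (assuming `client` is an already-connected `FCFullSynchronousMultiEngineClient`), a non-blocking call yields a deferred id that is later redeemed through `get_pending_deferred`:

    from IPython.kernel.util import printer

    def run_non_blocking(client):
        # block=False: the callback fires with a deferred_id, not the result itself
        d = client.execute('a = 10*10', targets='all', block=False)
        # redeem the deferred_id for the actual result
        d.addCallback(lambda did: client.get_pending_deferred(did, True))
        d.addCallback(printer)
        return d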
323 def clear_pending_deferreds(self):
334 def clear_pending_deferreds(self):
324
335
325 # This clears both the local (self.pdm) and remote pending deferreds
336 # This clears both the local (self.pdm) and remote pending deferreds
326 self.pdm.clear_pending_deferreds()
337 self.pdm.clear_pending_deferreds()
327 d2 = self.remote_reference.callRemote('clear_pending_deferreds')
338 d2 = self.remote_reference.callRemote('clear_pending_deferreds')
328 d2.addCallback(self.unpackage)
339 d2.addCallback(self.unpackage)
329 return d2
340 return d2
330
341
331 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
342 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
332 self._deferredIDCallbacks[did] = (callback, args, kwargs)
343 self._deferredIDCallbacks[did] = (callback, args, kwargs)
333 return did
344 return did
334
345
335 #---------------------------------------------------------------------------
346 #---------------------------------------------------------------------------
336 # IEngineMultiplexer related methods
347 # IEngineMultiplexer related methods
337 #---------------------------------------------------------------------------
348 #---------------------------------------------------------------------------
338
349
339 def execute(self, lines, targets='all', block=True):
350 def execute(self, lines, targets='all', block=True):
340 d = self.remote_reference.callRemote('execute', lines, targets, block)
351 d = self.remote_reference.callRemote('execute', lines, targets, block)
341 d.addCallback(self.unpackage)
352 d.addCallback(self.unpackage)
342 return d
353 return d
343
354
344 def push(self, namespace, targets='all', block=True):
355 def push(self, namespace, targets='all', block=True):
345 serial = pickle.dumps(namespace, 2)
356 serial = pickle.dumps(namespace, 2)
346 d = self.remote_reference.callRemote('push', serial, targets, block)
357 d = self.remote_reference.callRemote('push', serial, targets, block)
347 d.addCallback(self.unpackage)
358 d.addCallback(self.unpackage)
348 return d
359 return d
349
360
350 def pull(self, keys, targets='all', block=True):
361 def pull(self, keys, targets='all', block=True):
351 d = self.remote_reference.callRemote('pull', keys, targets, block)
362 d = self.remote_reference.callRemote('pull', keys, targets, block)
352 d.addCallback(self.unpackage)
363 d.addCallback(self.unpackage)
353 return d
364 return d
354
365
355 def push_function(self, namespace, targets='all', block=True):
366 def push_function(self, namespace, targets='all', block=True):
356 cannedNamespace = canDict(namespace)
367 cannedNamespace = canDict(namespace)
357 serial = pickle.dumps(cannedNamespace, 2)
368 serial = pickle.dumps(cannedNamespace, 2)
358 d = self.remote_reference.callRemote('push_function', serial, targets, block)
369 d = self.remote_reference.callRemote('push_function', serial, targets, block)
359 d.addCallback(self.unpackage)
370 d.addCallback(self.unpackage)
360 return d
371 return d
361
372
362 def pull_function(self, keys, targets='all', block=True):
373 def pull_function(self, keys, targets='all', block=True):
363 def uncan_functions(r, keys):
374 def uncan_functions(r, keys):
364 if len(keys)==1 or isinstance(keys, str):
375 if len(keys)==1 or isinstance(keys, str):
365 return uncanSequence(r)
376 return uncanSequence(r)
366 elif len(keys)>1:
377 elif len(keys)>1:
367 return [uncanSequence(s) for s in r]
378 return [uncanSequence(s) for s in r]
368 d = self.remote_reference.callRemote('pull_function', keys, targets, block)
379 d = self.remote_reference.callRemote('pull_function', keys, targets, block)
369 if block:
380 if block:
370 d.addCallback(self.unpackage)
381 d.addCallback(self.unpackage)
371 d.addCallback(uncan_functions, keys)
382 d.addCallback(uncan_functions, keys)
372 else:
383 else:
373 d.addCallback(self.unpackage)
384 d.addCallback(self.unpackage)
374 d.addCallback(lambda did: self._addDeferredIDCallback(did, uncan_functions, keys))
385 d.addCallback(lambda did: self._addDeferredIDCallback(did, uncan_functions, keys))
375 return d
386 return d
376
387
377 def push_serialized(self, namespace, targets='all', block=True):
388 def push_serialized(self, namespace, targets='all', block=True):
378 cannedNamespace = canDict(namespace)
389 cannedNamespace = canDict(namespace)
379 serial = pickle.dumps(cannedNamespace, 2)
390 serial = pickle.dumps(cannedNamespace, 2)
380 d = self.remote_reference.callRemote('push_serialized', serial, targets, block)
391 d = self.remote_reference.callRemote('push_serialized', serial, targets, block)
381 d.addCallback(self.unpackage)
392 d.addCallback(self.unpackage)
382 return d
393 return d
383
394
384 def pull_serialized(self, keys, targets='all', block=True):
395 def pull_serialized(self, keys, targets='all', block=True):
385 d = self.remote_reference.callRemote('pull_serialized', keys, targets, block)
396 d = self.remote_reference.callRemote('pull_serialized', keys, targets, block)
386 d.addCallback(self.unpackage)
397 d.addCallback(self.unpackage)
387 return d
398 return d
388
399
389 def get_result(self, i=None, targets='all', block=True):
400 def get_result(self, i=None, targets='all', block=True):
390 if i is None: # This is because None cannot be marshalled by xml-rpc
401 if i is None: # This is because None cannot be marshalled by xml-rpc
391 i = 'None'
402 i = 'None'
392 d = self.remote_reference.callRemote('get_result', i, targets, block)
403 d = self.remote_reference.callRemote('get_result', i, targets, block)
393 d.addCallback(self.unpackage)
404 d.addCallback(self.unpackage)
394 return d
405 return d
395
406
396 def reset(self, targets='all', block=True):
407 def reset(self, targets='all', block=True):
397 d = self.remote_reference.callRemote('reset', targets, block)
408 d = self.remote_reference.callRemote('reset', targets, block)
398 d.addCallback(self.unpackage)
409 d.addCallback(self.unpackage)
399 return d
410 return d
400
411
401 def keys(self, targets='all', block=True):
412 def keys(self, targets='all', block=True):
402 d = self.remote_reference.callRemote('keys', targets, block)
413 d = self.remote_reference.callRemote('keys', targets, block)
403 d.addCallback(self.unpackage)
414 d.addCallback(self.unpackage)
404 return d
415 return d
405
416
406 def kill(self, controller=False, targets='all', block=True):
417 def kill(self, controller=False, targets='all', block=True):
407 d = self.remote_reference.callRemote('kill', controller, targets, block)
418 d = self.remote_reference.callRemote('kill', controller, targets, block)
408 d.addCallback(self.unpackage)
419 d.addCallback(self.unpackage)
409 return d
420 return d
410
421
411 def clear_queue(self, targets='all', block=True):
422 def clear_queue(self, targets='all', block=True):
412 d = self.remote_reference.callRemote('clear_queue', targets, block)
423 d = self.remote_reference.callRemote('clear_queue', targets, block)
413 d.addCallback(self.unpackage)
424 d.addCallback(self.unpackage)
414 return d
425 return d
415
426
416 def queue_status(self, targets='all', block=True):
427 def queue_status(self, targets='all', block=True):
417 d = self.remote_reference.callRemote('queue_status', targets, block)
428 d = self.remote_reference.callRemote('queue_status', targets, block)
418 d.addCallback(self.unpackage)
429 d.addCallback(self.unpackage)
419 return d
430 return d
420
431
421 def set_properties(self, properties, targets='all', block=True):
432 def set_properties(self, properties, targets='all', block=True):
422 serial = pickle.dumps(properties, 2)
433 serial = pickle.dumps(properties, 2)
423 d = self.remote_reference.callRemote('set_properties', serial, targets, block)
434 d = self.remote_reference.callRemote('set_properties', serial, targets, block)
424 d.addCallback(self.unpackage)
435 d.addCallback(self.unpackage)
425 return d
436 return d
426
437
427 def get_properties(self, keys=None, targets='all', block=True):
438 def get_properties(self, keys=None, targets='all', block=True):
428 if keys==None:
439 if keys==None:
429 keys='None'
440 keys='None'
430 d = self.remote_reference.callRemote('get_properties', keys, targets, block)
441 d = self.remote_reference.callRemote('get_properties', keys, targets, block)
431 d.addCallback(self.unpackage)
442 d.addCallback(self.unpackage)
432 return d
443 return d
433
444
434 def has_properties(self, keys, targets='all', block=True):
445 def has_properties(self, keys, targets='all', block=True):
435 d = self.remote_reference.callRemote('has_properties', keys, targets, block)
446 d = self.remote_reference.callRemote('has_properties', keys, targets, block)
436 d.addCallback(self.unpackage)
447 d.addCallback(self.unpackage)
437 return d
448 return d
438
449
439 def del_properties(self, keys, targets='all', block=True):
450 def del_properties(self, keys, targets='all', block=True):
440 d = self.remote_reference.callRemote('del_properties', keys, targets, block)
451 d = self.remote_reference.callRemote('del_properties', keys, targets, block)
441 d.addCallback(self.unpackage)
452 d.addCallback(self.unpackage)
442 return d
453 return d
443
454
444 def clear_properties(self, targets='all', block=True):
455 def clear_properties(self, targets='all', block=True):
445 d = self.remote_reference.callRemote('clear_properties', targets, block)
456 d = self.remote_reference.callRemote('clear_properties', targets, block)
446 d.addCallback(self.unpackage)
457 d.addCallback(self.unpackage)
447 return d
458 return d
448
459
449 #---------------------------------------------------------------------------
460 #---------------------------------------------------------------------------
450 # IMultiEngine related methods
461 # IMultiEngine related methods
451 #---------------------------------------------------------------------------
462 #---------------------------------------------------------------------------
452
463
453 def get_ids(self):
464 def get_ids(self):
454 d = self.remote_reference.callRemote('get_ids')
465 d = self.remote_reference.callRemote('get_ids')
455 return d
466 return d
456
467
457 #---------------------------------------------------------------------------
468 #---------------------------------------------------------------------------
458 # ISynchronousMultiEngineCoordinator related methods
469 # ISynchronousMultiEngineCoordinator related methods
459 #---------------------------------------------------------------------------
470 #---------------------------------------------------------------------------
460
471
461 def _process_targets(self, targets):
472 def _process_targets(self, targets):
462 def create_targets(ids):
473 def create_targets(ids):
463 if isinstance(targets, int):
474 if isinstance(targets, int):
464 engines = [targets]
475 engines = [targets]
465 elif targets=='all':
476 elif targets=='all':
466 engines = ids
477 engines = ids
467 elif isinstance(targets, (list, tuple)):
478 elif isinstance(targets, (list, tuple)):
468 engines = targets
479 engines = targets
469 for t in engines:
480 for t in engines:
470 if not t in ids:
481 if not t in ids:
471 raise error.InvalidEngineID("engine with id %r does not exist"%t)
482 raise error.InvalidEngineID("engine with id %r does not exist"%t)
472 return engines
483 return engines
473
484
474 d = self.get_ids()
485 d = self.get_ids()
475 d.addCallback(create_targets)
486 d.addCallback(create_targets)
476 return d
487 return d
477
488
478 def scatter(self, key, seq, style='basic', flatten=False, targets='all', block=True):
489 def scatter(self, key, seq, dist='b', flatten=False, targets='all', block=True):
479
490
480 # Note: scatter and gather handle pending deferreds locally through self.pdm.
491 # Note: scatter and gather handle pending deferreds locally through self.pdm.
481 # This enables us to collect a bunch of deferred ids and make a secondary
492 # This enables us to collect a bunch of deferred ids and make a secondary
482 # deferred id that corresponds to the entire group. This logic is extremely
493 # deferred id that corresponds to the entire group. This logic is extremely
483 # difficult to get right though.
494 # difficult to get right though.
484 def do_scatter(engines):
495 def do_scatter(engines):
485 nEngines = len(engines)
496 nEngines = len(engines)
486 mapClass = Map.styles[style]
497 mapClass = Map.dists[dist]
487 mapObject = mapClass()
498 mapObject = mapClass()
488 d_list = []
499 d_list = []
489 # Loop through and push to each engine in non-blocking mode.
500 # Loop through and push to each engine in non-blocking mode.
490 # This returns a set of deferreds to deferred_ids
501 # This returns a set of deferreds to deferred_ids
491 for index, engineid in enumerate(engines):
502 for index, engineid in enumerate(engines):
492 partition = mapObject.getPartition(seq, index, nEngines)
503 partition = mapObject.getPartition(seq, index, nEngines)
493 if flatten and len(partition) == 1:
504 if flatten and len(partition) == 1:
494 d = self.push({key: partition[0]}, targets=engineid, block=False)
505 d = self.push({key: partition[0]}, targets=engineid, block=False)
495 else:
506 else:
496 d = self.push({key: partition}, targets=engineid, block=False)
507 d = self.push({key: partition}, targets=engineid, block=False)
497 d_list.append(d)
508 d_list.append(d)
498 # Collect the deferred to deferred_ids
509 # Collect the deferred to deferred_ids
499 d = gatherBoth(d_list,
510 d = gatherBoth(d_list,
500 fireOnOneErrback=0,
511 fireOnOneErrback=0,
501 consumeErrors=1,
512 consumeErrors=1,
502 logErrors=0)
513 logErrors=0)
503 # Now d has a list of deferred_ids or Failures coming
514 # Now d has a list of deferred_ids or Failures coming
504 d.addCallback(error.collect_exceptions, 'scatter')
515 d.addCallback(error.collect_exceptions, 'scatter')
505 def process_did_list(did_list):
516 def process_did_list(did_list):
506 """Turn a list of deferred_ids into a final result or failure."""
517 """Turn a list of deferred_ids into a final result or failure."""
507 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
518 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
508 final_d = gatherBoth(new_d_list,
519 final_d = gatherBoth(new_d_list,
509 fireOnOneErrback=0,
520 fireOnOneErrback=0,
510 consumeErrors=1,
521 consumeErrors=1,
511 logErrors=0)
522 logErrors=0)
512 final_d.addCallback(error.collect_exceptions, 'scatter')
523 final_d.addCallback(error.collect_exceptions, 'scatter')
513 final_d.addCallback(lambda lop: [i[0] for i in lop])
524 final_d.addCallback(lambda lop: [i[0] for i in lop])
514 return final_d
525 return final_d
515 # Now, depending on block, we need to handle the list deferred_ids
526 # Now, depending on block, we need to handle the list deferred_ids
516 # coming down the pipe differently.
527 # coming down the pipe differently.
517 if block:
528 if block:
518 # If we are blocking register a callback that will transform the
529 # If we are blocking register a callback that will transform the
519 # list of deferred_ids into the final result.
530 # list of deferred_ids into the final result.
520 d.addCallback(process_did_list)
531 d.addCallback(process_did_list)
521 return d
532 return d
522 else:
533 else:
523 # Here we are going to use a _local_ PendingDeferredManager.
534 # Here we are going to use a _local_ PendingDeferredManager.
524 deferred_id = self.pdm.get_deferred_id()
535 deferred_id = self.pdm.get_deferred_id()
525 # This is the deferred we will return to the user that will fire
536 # This is the deferred we will return to the user that will fire
526 # with the local deferred_id AFTER we have received the list of
537 # with the local deferred_id AFTER we have received the list of
527 # primary deferred_ids
538 # primary deferred_ids
528 d_to_return = defer.Deferred()
539 d_to_return = defer.Deferred()
529 def do_it(did_list):
540 def do_it(did_list):
530 """Produce a deferred to the final result, but first fire the
541 """Produce a deferred to the final result, but first fire the
531 deferred we will return to the user that has the local
542 deferred we will return to the user that has the local
532 deferred id."""
543 deferred id."""
533 d_to_return.callback(deferred_id)
544 d_to_return.callback(deferred_id)
534 return process_did_list(did_list)
545 return process_did_list(did_list)
535 d.addCallback(do_it)
546 d.addCallback(do_it)
536 # Now save the deferred to the final result
547 # Now save the deferred to the final result
537 self.pdm.save_pending_deferred(d, deferred_id)
548 self.pdm.save_pending_deferred(d, deferred_id)
538 return d_to_return
549 return d_to_return
539
550
540 d = self._process_targets(targets)
551 d = self._process_targets(targets)
541 d.addCallback(do_scatter)
552 d.addCallback(do_scatter)
542 return d
553 return d
543
554
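`scatter` delegates the actual splitting to the map objects looked up through `Map.dists[dist]`. The toy class below is only a simplified stand-in (the real implementation lives in `IPython.kernel.map` and may distribute remainders differently), shown to make the `getPartition`/`joinPartitions` contract concrete:

    class ToyBlockMap(object):
        """Simplified stand-in for the block ('b') map object."""

        def getPartition(self, seq, p, q):
            # Return the p-th of q contiguous blocks of seq.
            n = len(seq)
            basesize = n // q
            remainder = n % q
            low = p * basesize + min(p, remainder)
            high = low + basesize + int(p < remainder)
            return seq[low:high]

        def joinPartitions(self, listOfPartitions):
            joined = []
            for part in listOfPartitions:
                joined.extend(part)
            return joined

    m = ToyBlockMap()
    parts = [m.getPartition(range(10), i, 3) for i in range(3)]
    # parts == [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
    assert m.joinPartitions(parts) == range(10)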
544 def gather(self, key, style='basic', targets='all', block=True):
555 def gather(self, key, dist='b', targets='all', block=True):
545
556
546 # Note: scatter and gather handle pending deferreds locally through self.pdm.
557 # Note: scatter and gather handle pending deferreds locally through self.pdm.
547 # This enables us to collect a bunch of deferred ids and make a secondary
558 # This enables us to collect a bunch of deferred ids and make a secondary
548 # deferred id that corresponds to the entire group. This logic is extremely
559 # deferred id that corresponds to the entire group. This logic is extremely
549 # difficult to get right though.
560 # difficult to get right though.
550 def do_gather(engines):
561 def do_gather(engines):
551 nEngines = len(engines)
562 nEngines = len(engines)
552 mapClass = Map.styles[style]
563 mapClass = Map.dists[dist]
553 mapObject = mapClass()
564 mapObject = mapClass()
554 d_list = []
565 d_list = []
555 # Loop through and push to each engine in non-blocking mode.
566 # Loop through and push to each engine in non-blocking mode.
556 # This returns a set of deferreds to deferred_ids
567 # This returns a set of deferreds to deferred_ids
557 for index, engineid in enumerate(engines):
568 for index, engineid in enumerate(engines):
558 d = self.pull(key, targets=engineid, block=False)
569 d = self.pull(key, targets=engineid, block=False)
559 d_list.append(d)
570 d_list.append(d)
560 # Collect the deferred to deferred_ids
571 # Collect the deferred to deferred_ids
561 d = gatherBoth(d_list,
572 d = gatherBoth(d_list,
562 fireOnOneErrback=0,
573 fireOnOneErrback=0,
563 consumeErrors=1,
574 consumeErrors=1,
564 logErrors=0)
575 logErrors=0)
565 # Now d has a list of deferred_ids or Failures coming
576 # Now d has a list of deferred_ids or Failures coming
566 d.addCallback(error.collect_exceptions, 'gather')
577 d.addCallback(error.collect_exceptions, 'gather')
567 def process_did_list(did_list):
578 def process_did_list(did_list):
568 """Turn a list of deferred_ids into a final result or failure."""
579 """Turn a list of deferred_ids into a final result or failure."""
569 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
580 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
570 final_d = gatherBoth(new_d_list,
581 final_d = gatherBoth(new_d_list,
571 fireOnOneErrback=0,
582 fireOnOneErrback=0,
572 consumeErrors=1,
583 consumeErrors=1,
573 logErrors=0)
584 logErrors=0)
574 final_d.addCallback(error.collect_exceptions, 'gather')
585 final_d.addCallback(error.collect_exceptions, 'gather')
575 final_d.addCallback(lambda lop: [i[0] for i in lop])
586 final_d.addCallback(lambda lop: [i[0] for i in lop])
576 final_d.addCallback(mapObject.joinPartitions)
587 final_d.addCallback(mapObject.joinPartitions)
577 return final_d
588 return final_d
578 # Now, depending on block, we need to handle the list deferred_ids
589 # Now, depending on block, we need to handle the list deferred_ids
579 # coming down the pipe differently.
590 # coming down the pipe differently.
580 if block:
591 if block:
581 # If we are blocking register a callback that will transform the
592 # If we are blocking register a callback that will transform the
582 # list of deferred_ids into the final result.
593 # list of deferred_ids into the final result.
583 d.addCallback(process_did_list)
594 d.addCallback(process_did_list)
584 return d
595 return d
585 else:
596 else:
586 # Here we are going to use a _local_ PendingDeferredManager.
597 # Here we are going to use a _local_ PendingDeferredManager.
587 deferred_id = self.pdm.get_deferred_id()
598 deferred_id = self.pdm.get_deferred_id()
588 # This is the deferred we will return to the user that will fire
599 # This is the deferred we will return to the user that will fire
589 # with the local deferred_id AFTER we have received the list of
600 # with the local deferred_id AFTER we have received the list of
590 # primary deferred_ids
601 # primary deferred_ids
591 d_to_return = defer.Deferred()
602 d_to_return = defer.Deferred()
592 def do_it(did_list):
603 def do_it(did_list):
593 """Produce a deferred to the final result, but first fire the
604 """Produce a deferred to the final result, but first fire the
594 deferred we will return to the user that has the local
605 deferred we will return to the user that has the local
595 deferred id."""
606 deferred id."""
596 d_to_return.callback(deferred_id)
607 d_to_return.callback(deferred_id)
597 return process_did_list(did_list)
608 return process_did_list(did_list)
598 d.addCallback(do_it)
609 d.addCallback(do_it)
599 # Now save the deferred to the final result
610 # Now save the deferred to the final result
600 self.pdm.save_pending_deferred(d, deferred_id)
611 self.pdm.save_pending_deferred(d, deferred_id)
601 return d_to_return
612 return d_to_return
602
613
603 d = self._process_targets(targets)
614 d = self._process_targets(targets)
604 d.addCallback(do_gather)
615 d.addCallback(do_gather)
605 return d
616 return d
606
617
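From the user's side, scatter/execute/gather is most convenient through the blocking client obtained via `adapt_to_blocking_client` (defined at the end of this file), whose methods return values directly. A hedged sketch, assuming `mec` is such a client with engines already registered:

    mec.scatter('x', range(16))            # each engine receives a contiguous block of x
    mec.execute('y = [i**2 for i in x]')   # every engine works on its own partition
    squares = mec.gather('y')              # partitions are pulled back and joined in order
    # squares == [i**2 for i in range(16)]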
607 def map(self, func, seq, style='basic', targets='all', block=True):
618 def raw_map(self, func, sequences, dist='b', targets='all', block=True):
608 d_list = []
619 """
620 A parallelized version of Python's builtin map.
621
622 This has a slightly different syntax than the builtin `map`.
623 This is needed because we need to have keyword arguments and thus
624 can't use *args to capture all the sequences. Instead, they must
625 be passed in a list or tuple.
626
627 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
628
629 Most users will want to use parallel functions or the `mapper`
630 and `map` methods for an API that follows that of the builtin
631 `map`.
632 """
633 if not isinstance(sequences, (list, tuple)):
634 raise TypeError('sequences must be a list or tuple')
635 max_len = max(len(s) for s in sequences)
636 for s in sequences:
637 if len(s)!=max_len:
638 raise ValueError('all sequences must have equal length')
609 if isinstance(func, FunctionType):
639 if isinstance(func, FunctionType):
610 d = self.push_function(dict(_ipython_map_func=func), targets=targets, block=False)
640 d = self.push_function(dict(_ipython_map_func=func), targets=targets, block=False)
611 d.addCallback(lambda did: self.get_pending_deferred(did, True))
641 d.addCallback(lambda did: self.get_pending_deferred(did, True))
612 sourceToRun = '_ipython_map_seq_result = map(_ipython_map_func, _ipython_map_seq)'
642 sourceToRun = '_ipython_map_seq_result = map(_ipython_map_func, *zip(*_ipython_map_seq))'
613 elif isinstance(func, str):
643 elif isinstance(func, str):
614 d = defer.succeed(None)
644 d = defer.succeed(None)
615 sourceToRun = \
645 sourceToRun = \
616 '_ipython_map_seq_result = map(%s, _ipython_map_seq)' % func
646 '_ipython_map_seq_result = map(%s, *zip(*_ipython_map_seq))' % func
617 else:
647 else:
618 raise TypeError("func must be a function or str")
648 raise TypeError("func must be a function or str")
619
649
620 d.addCallback(lambda _: self.scatter('_ipython_map_seq', seq, style, targets=targets))
650 d.addCallback(lambda _: self.scatter('_ipython_map_seq', zip(*sequences), dist, targets=targets))
621 d.addCallback(lambda _: self.execute(sourceToRun, targets=targets, block=False))
651 d.addCallback(lambda _: self.execute(sourceToRun, targets=targets, block=False))
622 d.addCallback(lambda did: self.get_pending_deferred(did, True))
652 d.addCallback(lambda did: self.get_pending_deferred(did, True))
623 d.addCallback(lambda _: self.gather('_ipython_map_seq_result', style, targets=targets, block=block))
653 d.addCallback(lambda _: self.gather('_ipython_map_seq_result', dist, targets=targets, block=block))
624 return d
654 return d
625
655
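A hedged sketch of the `raw_map` calling convention described in the docstring above, again assuming `mec` is an already-connected blocking multiengine client:

    def add(a, b):
        return a + b

    # Note the single list of sequences instead of *args.
    sums = mec.raw_map(add, [range(5), range(5, 10)])
    # equivalent to map(add, range(5), range(5, 10)) == [5, 7, 9, 11, 13]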
656 def map(self, func, *sequences):
657 """
658 A parallel version of Python's builtin `map` function.
659
660 This method applies a function to sequences of arguments. It
661 follows the same syntax as the builtin `map`.
662
663 This method creates a mapper object by calling `self.mapper` with
664 no arguments and then uses that mapper to do the mapping. See
665 the documentation of `mapper` for more details.
666 """
667 return self.mapper().map(func, *sequences)
668
669 def mapper(self, dist='b', targets='all', block=True):
670 """
671 Create a mapper object that has a `map` method.
672
673 This method returns an object that implements the `IMapper`
674 interface. This method is a factory that is used to control how
675 the map happens.
676
677 :Parameters:
678 dist : str
679 What decomposition to use, 'b' is the only one supported
680 currently
681 targets : str, int, sequence of ints
682 Which engines to use for the map
683 block : boolean
684 Should calls to `map` block or not
685 """
686 return MultiEngineMapper(self, dist, targets, block)
687
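Illustrative only (same assumed blocking client `mec`): `map` builds a default mapper per call, while `mapper` fixes the decomposition, targets and blocking behaviour up front so the resulting object can be reused:

    def double(x):
        return 2 * x

    doubled = mec.map(double, range(10))         # one-off map with default settings

    m = mec.mapper(dist='b', targets='all', block=True)
    doubled_again = m.map(double, range(10))     # the same mapper reused for later calls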
688 def parallel(self, dist='b', targets='all', block=True):
689 """
690 A decorator that turns a function into a parallel function.
691
692 This can be used as:
693
694 @parallel()
695 def f(x, y):
696 ...
697
698 f(range(10), range(10))
699
700 This causes f(0,0), f(1,1), ... to be called in parallel.
701
702 :Parameters:
703 dist : str
704 What decomposition to use, 'b' is the only one supported
705 currently
706 targets : str, int, sequence of ints
707 Which engines to use for the map
708 block : boolean
709 Should calls to `map` block or not
710 """
711 mapper = self.mapper(dist, targets, block)
712 pf = ParallelFunction(mapper)
713 return pf
714
626 #---------------------------------------------------------------------------
715 #---------------------------------------------------------------------------
627 # ISynchronousMultiEngineExtras related methods
716 # ISynchronousMultiEngineExtras related methods
628 #---------------------------------------------------------------------------
717 #---------------------------------------------------------------------------
629
718
630 def _transformPullResult(self, pushResult, multitargets, lenKeys):
719 def _transformPullResult(self, pushResult, multitargets, lenKeys):
631 if not multitargets:
720 if not multitargets:
632 result = pushResult[0]
721 result = pushResult[0]
633 elif lenKeys > 1:
722 elif lenKeys > 1:
634 result = zip(*pushResult)
723 result = zip(*pushResult)
635 elif lenKeys == 1:
724 elif lenKeys == 1:
636 result = list(pushResult)
725 result = list(pushResult)
637 return result
726 return result
638
727
639 def zip_pull(self, keys, targets='all', block=True):
728 def zip_pull(self, keys, targets='all', block=True):
640 multitargets = not isinstance(targets, int) and len(targets) > 1
729 multitargets = not isinstance(targets, int) and len(targets) > 1
641 lenKeys = len(keys)
730 lenKeys = len(keys)
642 d = self.pull(keys, targets=targets, block=block)
731 d = self.pull(keys, targets=targets, block=block)
643 if block:
732 if block:
644 d.addCallback(self._transformPullResult, multitargets, lenKeys)
733 d.addCallback(self._transformPullResult, multitargets, lenKeys)
645 else:
734 else:
646 d.addCallback(lambda did: self._addDeferredIDCallback(did, self._transformPullResult, multitargets, lenKeys))
735 d.addCallback(lambda did: self._addDeferredIDCallback(did, self._transformPullResult, multitargets, lenKeys))
647 return d
736 return d
648
737
649 def run(self, fname, targets='all', block=True):
738 def run(self, fname, targets='all', block=True):
650 fileobj = open(fname,'r')
739 fileobj = open(fname,'r')
651 source = fileobj.read()
740 source = fileobj.read()
652 fileobj.close()
741 fileobj.close()
653 # if the compilation blows, we get a local error right away
742 # if the compilation blows, we get a local error right away
654 try:
743 try:
655 code = compile(source,fname,'exec')
744 code = compile(source,fname,'exec')
656 except:
745 except:
657 return defer.fail(failure.Failure())
746 return defer.fail(failure.Failure())
658 # Now run the code
747 # Now run the code
659 d = self.execute(source, targets=targets, block=block)
748 d = self.execute(source, targets=targets, block=block)
660 return d
749 return d
661
750
662 #---------------------------------------------------------------------------
751 #---------------------------------------------------------------------------
663 # IBlockingClientAdaptor related methods
752 # IBlockingClientAdaptor related methods
664 #---------------------------------------------------------------------------
753 #---------------------------------------------------------------------------
665
754
666 def adapt_to_blocking_client(self):
755 def adapt_to_blocking_client(self):
667 from IPython.kernel.multiengineclient import IFullBlockingMultiEngineClient
756 from IPython.kernel.multiengineclient import IFullBlockingMultiEngineClient
668 return IFullBlockingMultiEngineClient(self)
757 return IFullBlockingMultiEngineClient(self)
@@ -1,32 +1,107 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """A parallelized function that does scatter/execute/gather."""
3 """A parallelized function that does scatter/execute/gather."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 from types import FunctionType
18 from types import FunctionType
19 from zope.interface import Interface, implements
19
20
20 class ParallelFunction:
21
21 """A function that operates in parallel on sequences."""
22 class IMultiEngineParallelDecorator(Interface):
22 def __init__(self, func, multiengine, targets, block):
23 """A decorator that creates a parallel function."""
23 """Create a `ParallelFunction`.
24
25 def parallel(dist='b', targets=None, block=None):
26 """
27 A decorator that turns a function into a parallel function.
28
29 This can be used as:
30
31 @parallel()
32 def f(x, y):
33 ...
34
35 f(range(10), range(10))
36
37 This causes f(0,0), f(1,1), ... to be called in parallel.
38
39 :Parameters:
40 dist : str
41 What decomposition to use, 'b' is the only one supported
42 currently
43 targets : str, int, sequence of ints
44 Which engines to use for the map
45 block : boolean
46 Should calls to `map` block or not
47 """
48
49 class ITaskParallelDecorator(Interface):
50 """A decorator that creates a parallel function."""
51
52 def parallel(clear_before=False, clear_after=False, retries=0,
53 recovery_task=None, depend=None, block=True):
54 """
55 A decorator that turns a function into a parallel function.
56
57 This can be used as:
58
59 @parallel()
60 def f(x, y):
61 ...
62
63 f(range(10), range(10))
64
65 This causes f(0,0), f(1,1), ... to be called in parallel.
66
67 See the documentation for `IPython.kernel.task.BaseTask` for
68 documentation on the arguments to this method.
69 """
70
71 class IParallelFunction(Interface):
72 pass
73
74 class ParallelFunction(object):
75 """
76 The implementation of a parallel function.
77
78 A parallel function is similar to Python's map function:
79
80 map(func, *sequences) -> pfunc(*sequences)
81
82 Parallel functions should be created by using the @parallel decorator.
83 """
84
85 implements(IParallelFunction)
86
87 def __init__(self, mapper):
88 """
89 Create a parallel function from an `IMapper`.
90
91 :Parameters:
92 mapper : an `IMapper` implementer.
93 The mapper to use for the parallel function
94 """
95 self.mapper = mapper
96
97 def __call__(self, func):
98 """
99 Decorate a function to make it run in parallel.
24 """
100 """
25 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
101 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
26 self.func = func
102 self.func = func
27 self.multiengine = multiengine
103 def call_function(*sequences):
28 self.targets = targets
104 return self.mapper.map(self.func, *sequences)
29 self.block = block
105 return call_function
30
106
31 def __call__(self, sequence):
107 No newline at end of file
32 return self.multiengine.map(self.func, sequence, targets=self.targets, block=self.block) No newline at end of file
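Because `ParallelFunction` only needs an object with a `map` method, the same decorator machinery works with both multiengine and task mappers. The sketch below uses a toy local mapper (a stand-in, not a real `IMapper` from the kernel) purely to show the decorator mechanics:

    class LocalMapper(object):
        """Toy stand-in that maps locally instead of dispatching to engines."""
        def map(self, func, *sequences):
            return map(func, *sequences)

    pf = ParallelFunction(LocalMapper())

    @pf
    def add(x, y):
        return x + y

    print add(range(3), range(3))   # -> [0, 2, 4]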
This diff has been collapsed as it changes many lines (682 lines changed).
@@ -1,803 +1,1113 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_task -*-
2 # -*- test-case-name: IPython.kernel.tests.test_task -*-
3
3
4 """Task farming representation of the ControllerService."""
4 """Task farming representation of the ControllerService."""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19 import copy, time
19 import copy, time
20 from types import FunctionType as function
20 from types import FunctionType
21
21
22 import zope.interface as zi, string
22 import zope.interface as zi, string
23 from twisted.internet import defer, reactor
23 from twisted.internet import defer, reactor
24 from twisted.python import components, log, failure
24 from twisted.python import components, log, failure
25
25
26 # from IPython.genutils import time
26 from IPython.kernel.util import printer
27
28 from IPython.kernel import engineservice as es, error
27 from IPython.kernel import engineservice as es, error
29 from IPython.kernel import controllerservice as cs
28 from IPython.kernel import controllerservice as cs
30 from IPython.kernel.twistedutil import gatherBoth, DeferredList
29 from IPython.kernel.twistedutil import gatherBoth, DeferredList
31
30
32 from IPython.kernel.pickleutil import can,uncan, CannedFunction
31 from IPython.kernel.pickleutil import can, uncan, CannedFunction
33
34 def canTask(task):
35 t = copy.copy(task)
36 t.depend = can(t.depend)
37 if t.recovery_task:
38 t.recovery_task = canTask(t.recovery_task)
39 return t
40
32
41 def uncanTask(task):
33 #-----------------------------------------------------------------------------
42 t = copy.copy(task)
34 # Definition of the Task objects
43 t.depend = uncan(t.depend)
35 #-----------------------------------------------------------------------------
44 if t.recovery_task and t.recovery_task is not task:
45 t.recovery_task = uncanTask(t.recovery_task)
46 return t
47
36
48 time_format = '%Y/%m/%d %H:%M:%S'
37 time_format = '%Y/%m/%d %H:%M:%S'
49
38
50 class Task(object):
39 class ITask(zi.Interface):
51 r"""Our representation of a task for the `TaskController` interface.
40 """
52
41 This interface provides a generic definition of what constitutes a task.
53 The user should create instances of this class to represent a task that
42
54 needs to be done.
43 There are two sides to a task. First a task needs to take input from
55
44 a user to determine what work is performed by the task. Second, the
56 :Parameters:
45 task needs to have the logic that knows how to turn that information
57 expression : str
46 into specific calls to a worker, through the `IQueuedEngine` interface.
58 A str that is valid python code that is the task.
47
59 pull : str or list of str
48 Many methods in this class get two things passed to them: a Deferred
60 The names of objects to be pulled as results. If not specified,
49 and an IQueuedEngine implementer. Such methods should register callbacks
61 will return {'result', None}
50 on the Deferred that use the IQueuedEngine to accomplish something. See
62 push : dict
51 the existing task objects for examples.
63 A dict of objects to be pushed into the engines namespace before
52 """
64 execution of the expression.
53
65 clear_before : boolean
54 zi.Attribute('retries','How many times to retry the task')
66 Should the engine's namespace be cleared before the task is run.
55 zi.Attribute('recovery_task','A task to try if the initial one fails')
67 Default=False.
56 zi.Attribute('taskid','the id of the task')
68 clear_after : boolean
57
69 Should the engine's namespace be cleared after the task is run.
58 def start_time(result):
70 Default=False.
59 """
71 retries : int
60 Do anything needed to start the timing of the task.
72 The number of times to resubmit the task if it fails. Default=0.
61
73 recovery_task : Task
62 Must simply return the result after starting the timers.
74 This is the Task to be run when the task has exhausted its retries
63 """
75 Default=None.
76 depend : bool function(properties)
77 This is the dependency function for the Task, which determines
78 whether a task can be run on a Worker. `depend` is called with
79 one argument, the worker's properties dict, and should return
80 True if the worker meets the dependencies or False if it does
81 not.
82 Default=None - run on any worker
83 options : dict
84 Any other keyword options for more elaborate uses of tasks
85
86 Examples
87 --------
88
64
89 >>> t = Task('dostuff(args)')
65 def stop_time(result):
90 >>> t = Task('a=5', pull='a')
66 """
91 >>> t = Task('a=5\nb=4', pull=['a','b'])
67 Do anything needed to stop the timing of the task.
92 >>> t = Task('os.kill(os.getpid(),9)', retries=100) # this is a bad idea
68
69 Must simply return the result after stopping the timers. This
70 method will usually set attributes that are used by `process_result`
71 in building result of the task.
72 """
73
74 def pre_task(d, queued_engine):
75 """Do something with the queued_engine before the task is run.
76
77 This method should simply add callbacks to the input Deferred
78 that do something with the `queued_engine` before the task is run.
79
80 :Parameters:
81 d : Deferred
82 The deferred that actions should be attached to
83 queued_engine : IQueuedEngine implementer
84 The worker that has been allocated to perform the task
85 """
86
87 def post_task(d, queued_engine):
88 """Do something with the queued_engine after the task is run.
89
90 This method should simply add callbacks to the input Deferred
91 that do something with the `queued_engine` after the task is run.
92
93 :Parameters:
94 d : Deferred
95 The deferred that actions should be attached to
96 queued_engine : IQueuedEngine implementer
97 The worker that has been allocated to perform the task
98 """
99
100 def submit_task(d, queued_engine):
101 """Submit a task using the `queued_engine` we have been allocated.
102
103 When a task is ready to run, this method is called. This method
104 must take the internal information of the task and make suitable
105 calls on the queued_engine to have the actual work done.
106
107 This method should simply add callbacks to the input Deferred
108 that do something with the `queued_engine` before the task is run.
109
110 :Parameters:
111 d : Deferred
112 The deferred that actions should be attached to
113 queued_engine : IQueuedEngine implementer
114 The worker that has been allocated to perform the task
115 """
93
116
94 A dependency case:
117 def process_result(d, result, engine_id):
95 >>> def hasMPI(props):
118 """Take a raw task result.
96 ... return props.get('mpi') is not None
119
97 >>> t = Task('mpi.send(blah,blah)', depend = hasMPI)
120 Objects that implement `ITask` can choose how the result of running
121 the task is presented. This method takes the raw result and
122 does this logic. Two examples are the `MapTask` which simply returns
123 the raw result or a `Failure` object and the `StringTask` which
124 returns a `TaskResult` object.
125
126 :Parameters:
127 d : Deferred
128 The deferred that actions should be attached to
129 result : object
130 The raw task result that needs to be wrapped
131 engine_id : int
132 The id of the engine that did the task
133
134 :Returns:
135 The result, as a tuple of the form: (success, result).
136 Here, success is a boolean indicating if the task
137 succeeded or failed and result is the result.
138 """
139
140 def check_depend(properties):
141 """Check properties to see if the task should be run.
142
143 :Parameters:
144 properties : dict
145 A dictionary of properties that an engine has set
146
147 :Returns:
148 True if the task should be run, False otherwise
149 """
150
151 def can_task(self):
152 """Serialize (can) any functions in the task for pickling.
153
154 Subclasses must override this method and make sure that all
155 functions in the task are canned by calling `can` on the
156 function.
157 """
158
159 def uncan_task(self):
160 """Unserialize (uncan) any canned function in the task."""
161
162 class BaseTask(object):
163 """
164 Common functionality for all objects implementing `ITask`.
98 """
165 """
99
166
100 def __init__(self, expression, pull=None, push=None,
167 zi.implements(ITask)
101 clear_before=False, clear_after=False, retries=0,
168
102 recovery_task=None, depend=None, **options):
169 def __init__(self, clear_before=False, clear_after=False, retries=0,
103 self.expression = expression
170 recovery_task=None, depend=None):
104 if isinstance(pull, str):
171 """
105 self.pull = [pull]
172 Make a generic task.
106 else:
173
107 self.pull = pull
174 :Parameters:
108 self.push = push
175 clear_before : boolean
176 Should the engine's namespace be cleared before the task
177 is run
178 clear_after : boolean
179 Should the engine's namespace be cleared after the task is run
180 retries : int
181 The number of times a task should be retried upon failure
182 recovery_task : any task object
183 If a task fails and it has a recovery_task, that is run
184 upon a retry
185 depend : FunctionType
186 A function that is called to test for properties. This function
187 must take one argument, the properties dict and return a boolean
188 """
109 self.clear_before = clear_before
189 self.clear_before = clear_before
110 self.clear_after = clear_after
190 self.clear_after = clear_after
111 self.retries=retries
191 self.retries = retries
112 self.recovery_task = recovery_task
192 self.recovery_task = recovery_task
113 self.depend = depend
193 self.depend = depend
114 self.options = options
115 self.taskid = None
194 self.taskid = None
195
196 def start_time(self, result):
197 """
198 Start the basic timers.
199 """
200 self.start = time.time()
201 self.start_struct = time.localtime()
202 return result
203
204 def stop_time(self, result):
205 """
206 Stop the basic timers.
207 """
208 self.stop = time.time()
209 self.stop_struct = time.localtime()
210 self.duration = self.stop - self.start
211 self.submitted = time.strftime(time_format, self.start_struct)
212 self.completed = time.strftime(time_format)
213 return result
214
215 def pre_task(self, d, queued_engine):
216 """
217 Clear the engine before running the task if clear_before is set.
218 """
219 if self.clear_before:
220 d.addCallback(lambda r: queued_engine.reset())
221
222 def post_task(self, d, queued_engine):
223 """
224 Clear the engine after running the task if clear_after is set.
225 """
226 def reseter(result):
227 queued_engine.reset()
228 return result
229 if self.clear_after:
230 d.addBoth(reseter)
231
232 def submit_task(self, d, queued_engine):
233 raise NotImplementedError('submit_task must be implemented in a subclass')
234
235 def process_result(self, result, engine_id):
236 """
237 Process a task result.
238
239 This is the default `process_result` that just returns the raw
240 result or a `Failure`.
241 """
242 if isinstance(result, failure.Failure):
243 return (False, result)
244 else:
245 return (True, result)
246
247 def check_depend(self, properties):
248 """
249 Calls self.depend(properties) to see if a task should be run.
250 """
251 if self.depend is not None:
252 return self.depend(properties)
253 else:
254 return True
255
256 def can_task(self):
257 self.depend = can(self.depend)
258 if isinstance(self.recovery_task, BaseTask):
259 self.recovery_task.can_task()
260
261 def uncan_task(self):
262 self.depend = uncan(self.depend)
263 if isinstance(self.recovery_task, BaseTask):
264 self.recovery_task.uncan_task()
265
266 class MapTask(BaseTask):
267 """
268 A task that consists of a function and arguments.
269 """
270
271 zi.implements(ITask)
272
273 def __init__(self, function, args=None, kwargs=None, clear_before=False,
274 clear_after=False, retries=0, recovery_task=None, depend=None):
275 """
276 Create a task based on a function, args and kwargs.
277
278 This is a simple type of task that consists of calling:
279 function(*args, **kwargs).
280
281 The return value of the function, or a `Failure` wrapping an
282 exception is the task result for this type of task.
283 """
284 BaseTask.__init__(self, clear_before, clear_after, retries,
285 recovery_task, depend)
286 if not isinstance(function, FunctionType):
287 raise TypeError('a task function must be a FunctionType')
288 self.function = function
289 if args is None:
290 self.args = ()
291 else:
292 self.args = args
293 if not isinstance(self.args, (list, tuple)):
294 raise TypeError('a task args must be a list or tuple')
295 if kwargs is None:
296 self.kwargs = {}
297 else:
298 self.kwargs = kwargs
299 if not isinstance(self.kwargs, dict):
300 raise TypeError('a task kwargs must be a dict')
301
302 def submit_task(self, d, queued_engine):
303 d.addCallback(lambda r: queued_engine.push_function(
304 dict(_ipython_task_function=self.function))
305 )
306 d.addCallback(lambda r: queued_engine.push(
307 dict(_ipython_task_args=self.args,_ipython_task_kwargs=self.kwargs))
308 )
309 d.addCallback(lambda r: queued_engine.execute(
310 '_ipython_task_result = _ipython_task_function(*_ipython_task_args,**_ipython_task_kwargs)')
311 )
312 d.addCallback(lambda r: queued_engine.pull('_ipython_task_result'))
313
314 def can_task(self):
315 self.function = can(self.function)
316 BaseTask.can_task(self)
317
318 def uncan_task(self):
319 self.function = uncan(self.function)
320 BaseTask.uncan_task(self)
321
322
323 class StringTask(BaseTask):
324 """
325 A task that consists of a string of Python code to run.
326 """
327
328 def __init__(self, expression, pull=None, push=None,
329 clear_before=False, clear_after=False, retries=0,
330 recovery_task=None, depend=None):
331 """
332 Create a task based on a Python expression and variables
333
334 This type of task lets you push a set of variables to the engine's
335 namespace, run a Python string in that namespace and then bring back
336 a different set of Python variables as the result.
337
338 Because this type of task can return many results (through the
339 `pull` keyword argument) it returns a special `TaskResult` object
340 that wraps the pulled variables, statistics about the run and
341 any exceptions raised.
342 """
343 if not isinstance(expression, str):
344 raise TypeError('a task expression must be a string')
345 self.expression = expression
346
347 if pull==None:
348 self.pull = ()
349 elif isinstance(pull, str):
350 self.pull = (pull,)
351 elif isinstance(pull, (list, tuple)):
352 self.pull = pull
353 else:
354 raise TypeError('pull must be str or a sequence of strs')
355
356 if push==None:
357 self.push = {}
358 elif isinstance(push, dict):
359 self.push = push
360 else:
361 raise TypeError('push must be a dict')
362
363 BaseTask.__init__(self, clear_before, clear_after, retries,
364 recovery_task, depend)
116
365
117 class ResultNS:
366 def submit_task(self, d, queued_engine):
118 """The result namespace object for use in TaskResult objects as tr.ns.
367 if self.push is not None:
368 d.addCallback(lambda r: queued_engine.push(self.push))
369
370 d.addCallback(lambda r: queued_engine.execute(self.expression))
371
372 if self.pull is not None:
373 d.addCallback(lambda r: queued_engine.pull(self.pull))
374 else:
375 d.addCallback(lambda r: None)
376
377 def process_result(self, result, engine_id):
378 if isinstance(result, failure.Failure):
379 tr = TaskResult(result, engine_id)
380 else:
381 if self.pull is None:
382 resultDict = {}
383 elif len(self.pull) == 1:
384 resultDict = {self.pull[0]:result}
385 else:
386 resultDict = dict(zip(self.pull, result))
387 tr = TaskResult(resultDict, engine_id)
388 # Assign task attributes
389 tr.submitted = self.submitted
390 tr.completed = self.completed
391 tr.duration = self.duration
392 if hasattr(self,'taskid'):
393 tr.taskid = self.taskid
394 else:
395 tr.taskid = None
396 if isinstance(result, failure.Failure):
397 return (False, tr)
398 else:
399 return (True, tr)
400
401 class ResultNS(object):
402 """
403 A dict-like object for holding the results of a task.
404
405 The result namespace object for use in `TaskResult` objects as tr.ns.
119 It builds an object from a dictionary, such that it has attributes
406 It builds an object from a dictionary, such that it has attributes
120 according to the key,value pairs of the dictionary.
407 according to the key,value pairs of the dictionary.
121
408
122 This works by calling setattr on ALL key,value pairs in the dict. If a user
409 This works by calling setattr on ALL key,value pairs in the dict. If a user
123 chooses to overwrite the `__repr__` or `__getattr__` attributes, they can.
410 chooses to overwrite the `__repr__` or `__getattr__` attributes, they can.
124 This can be a bad idea, as it may corrupt standard behavior of the
411 This can be a bad idea, as it may corrupt standard behavior of the
125 ns object.
412 ns object.
126
413
127 Example
414 Example
128 --------
415 --------
129
416
130 >>> ns = ResultNS({'a':17,'foo':range(3)})
417 >>> ns = ResultNS({'a':17,'foo':range(3)})
131
132 >>> print ns
418 >>> print ns
133 NS{'a': 17, 'foo': [0, 1, 2]}
419 NS{'a': 17, 'foo': [0, 1, 2]}
134
135 >>> ns.a
420 >>> ns.a
136 17
421 17
137
138 >>> ns['foo']
422 >>> ns['foo']
139 [0, 1, 2]
423 [0, 1, 2]
140 """
424 """
141 def __init__(self, dikt):
425 def __init__(self, dikt):
142 for k,v in dikt.iteritems():
426 for k,v in dikt.iteritems():
143 setattr(self,k,v)
427 setattr(self,k,v)
144
428
145 def __repr__(self):
429 def __repr__(self):
146 l = dir(self)
430 l = dir(self)
147 d = {}
431 d = {}
148 for k in l:
432 for k in l:
149 # do not print private objects
433 # do not print private objects
150 if k[:2] != '__' and k[-2:] != '__':
434 if k[:2] != '__' and k[-2:] != '__':
151 d[k] = getattr(self, k)
435 d[k] = getattr(self, k)
152 return "NS"+repr(d)
436 return "NS"+repr(d)
153
437
154 def __getitem__(self, key):
438 def __getitem__(self, key):
155 return getattr(self, key)
439 return getattr(self, key)
156
440
157 class TaskResult(object):
441 class TaskResult(object):
158 """
442 """
159 An object for returning task results.
443 An object for returning task results for certain types of tasks.
160
444
161 This object encapsulates the results of a task. On task
445 This object encapsulates the results of a task. On task
162 success it will have a keys attribute that will have a list
446 success it will have a keys attribute that will have a list
163 of the variables that have been pulled back. These variables
447 of the variables that have been pulled back. These variables
164 are accessible as attributes of this class as well. On
448 are accessible as attributes of this class as well. On
165 success the failure attribute will be None.
449 success the failure attribute will be None.
166
450
167 In task failure, keys will be empty, but failure will contain
451 In task failure, keys will be empty, but failure will contain
168 the failure object that encapsulates the remote exception.
452 the failure object that encapsulates the remote exception.
169 One can also simply call the raiseException() method of
453 One can also simply call the `raise_exception` method of
170 this class to re-raise any remote exception in the local
454 this class to re-raise any remote exception in the local
171 session.
455 session.
172
456
173 The TaskResult has a .ns member, which is a property for access
457 The `TaskResult` has a `.ns` member, which is a property for access
174 to the results. If the Task had pull=['a', 'b'], then the
458 to the results. If the Task had pull=['a', 'b'], then the
175 Task Result will have attributes tr.ns.a, tr.ns.b for those values.
459 Task Result will have attributes `tr.ns.a`, `tr.ns.b` for those values.
176 Accessing tr.ns will raise the remote failure if the task failed.
460 Accessing `tr.ns` will raise the remote failure if the task failed.
177
461
178 The engineid attribute should have the engineid of the engine
462 The `engineid` attribute should have the `engineid` of the engine
179 that ran the task. But, because engines can come and go in
463 that ran the task. But, because engines can come and go,
180 the ipython task system, the engineid may not continue to be
464 the `engineid` may not continue to be
181 valid or accurate.
465 valid or accurate.
182
466
183 The taskid attribute simply gives the taskid that the task
467 The `taskid` attribute simply gives the `taskid` that the task
184 is tracked under.
468 is tracked under.
185 """
469 """
186 taskid = None
470 taskid = None
187
471
188 def _getNS(self):
472 def _getNS(self):
189 if isinstance(self.failure, failure.Failure):
473 if isinstance(self.failure, failure.Failure):
190 return self.failure.raiseException()
474 return self.failure.raiseException()
191 else:
475 else:
192 return self._ns
476 return self._ns
193
477
194 def _setNS(self, v):
478 def _setNS(self, v):
195 raise Exception("I am protected!")
479 raise Exception("the ns attribute cannot be changed")
196
480
197 ns = property(_getNS, _setNS)
481 ns = property(_getNS, _setNS)
198
482
199 def __init__(self, results, engineid):
483 def __init__(self, results, engineid):
200 self.engineid = engineid
484 self.engineid = engineid
201 if isinstance(results, failure.Failure):
485 if isinstance(results, failure.Failure):
202 self.failure = results
486 self.failure = results
203 self.results = {}
487 self.results = {}
204 else:
488 else:
205 self.results = results
489 self.results = results
206 self.failure = None
490 self.failure = None
207
491
208 self._ns = ResultNS(self.results)
492 self._ns = ResultNS(self.results)
209
493
210 self.keys = self.results.keys()
494 self.keys = self.results.keys()
211
495
212 def __repr__(self):
496 def __repr__(self):
213 if self.failure is not None:
497 if self.failure is not None:
214 contents = self.failure
498 contents = self.failure
215 else:
499 else:
216 contents = self.results
500 contents = self.results
217 return "TaskResult[ID:%r]:%r"%(self.taskid, contents)
501 return "TaskResult[ID:%r]:%r"%(self.taskid, contents)
218
502
219 def __getitem__(self, key):
503 def __getitem__(self, key):
220 if self.failure is not None:
504 if self.failure is not None:
221 self.raiseException()
505 self.raise_exception()
222 return self.results[key]
506 return self.results[key]
223
507
224 def raiseException(self):
508 def raise_exception(self):
225 """Re-raise any remote exceptions in the local python session."""
509 """Re-raise any remote exceptions in the local python session."""
226 if self.failure is not None:
510 if self.failure is not None:
227 self.failure.raiseException()
511 self.failure.raiseException()
228
512
229
513
514 #-----------------------------------------------------------------------------
515 # The controller side of things
516 #-----------------------------------------------------------------------------
517
230 class IWorker(zi.Interface):
518 class IWorker(zi.Interface):
231 """The Basic Worker Interface.
519 """The Basic Worker Interface.
232
520
233 A worker is a representation of an Engine that is ready to run tasks.
521 A worker is a representation of an Engine that is ready to run tasks.
234 """
522 """
235
523
236 zi.Attribute("workerid", "the id of the worker")
524 zi.Attribute("workerid", "the id of the worker")
237
525
238 def run(task):
526 def run(task):
239 """Run task in worker's namespace.
527 """Run task in worker's namespace.
240
528
241 :Parameters:
529 :Parameters:
242 task : a `Task` object
530 task : a `Task` object
243
531
244 :Returns: `Deferred` to a `TaskResult` object.
532 :Returns: `Deferred` to a tuple of (success, result) where
533 success is a boolean that signifies success or failure
534 and result is the task result.
245 """
535 """
246
536
247
537
248 class WorkerFromQueuedEngine(object):
538 class WorkerFromQueuedEngine(object):
249 """Adapt an `IQueuedEngine` to an `IWorker` object"""
539 """Adapt an `IQueuedEngine` to an `IWorker` object"""
540
250 zi.implements(IWorker)
541 zi.implements(IWorker)
251
542
252 def __init__(self, qe):
543 def __init__(self, qe):
253 self.queuedEngine = qe
544 self.queuedEngine = qe
254 self.workerid = None
545 self.workerid = None
255
546
256 def _get_properties(self):
547 def _get_properties(self):
257 return self.queuedEngine.properties
548 return self.queuedEngine.properties
258
549
259 properties = property(_get_properties, lambda self, _:None)
550 properties = property(_get_properties, lambda self, _:None)
260
551
261 def run(self, task):
552 def run(self, task):
262 """Run task in worker's namespace.
553 """Run task in worker's namespace.
263
554
555 This takes a task and calls methods on the task that actually
556 cause `self.queuedEngine` to do the task. See the methods of
557 `ITask` for more information about how these methods are called.
558
264 :Parameters:
559 :Parameters:
265 task : a `Task` object
560 task : a `Task` object
266
561
267 :Returns: `Deferred` to a `TaskResult` object.
562 :Returns: `Deferred` to a tuple of (success, result) where
563 success is a boolean that signifies success or failure
564 and result is the task result.
268 """
565 """
269 if task.clear_before:
566 d = defer.succeed(None)
270 d = self.queuedEngine.reset()
567 d.addCallback(task.start_time)
271 else:
568 task.pre_task(d, self.queuedEngine)
272 d = defer.succeed(None)
569 task.submit_task(d, self.queuedEngine)
273
570 task.post_task(d, self.queuedEngine)
274 if task.push is not None:
571 d.addBoth(task.stop_time)
275 d.addCallback(lambda r: self.queuedEngine.push(task.push))
572 d.addBoth(task.process_result, self.queuedEngine.id)
276
573 # At this point, there will be (success, result) coming down the line
277 d.addCallback(lambda r: self.queuedEngine.execute(task.expression))
574 return d
278
575
279 if task.pull is not None:
280 d.addCallback(lambda r: self.queuedEngine.pull(task.pull))
281 else:
282 d.addCallback(lambda r: None)
283
284 def reseter(result):
285 self.queuedEngine.reset()
286 return result
287
288 if task.clear_after:
289 d.addBoth(reseter)
290
291 return d.addBoth(self._zipResults, task.pull, time.time(), time.localtime())
292
293 def _zipResults(self, result, names, start, start_struct):
294 """Callback for construting the TaskResult object."""
295 if isinstance(result, failure.Failure):
296 tr = TaskResult(result, self.queuedEngine.id)
297 else:
298 if names is None:
299 resultDict = {}
300 elif len(names) == 1:
301 resultDict = {names[0]:result}
302 else:
303 resultDict = dict(zip(names, result))
304 tr = TaskResult(resultDict, self.queuedEngine.id)
305 # the time info
306 tr.submitted = time.strftime(time_format, start_struct)
307 tr.completed = time.strftime(time_format)
308 tr.duration = time.time()-start
309 return tr
310
311
576
312 components.registerAdapter(WorkerFromQueuedEngine, es.IEngineQueued, IWorker)
577 components.registerAdapter(WorkerFromQueuedEngine, es.IEngineQueued, IWorker)
313
578
314 class IScheduler(zi.Interface):
579 class IScheduler(zi.Interface):
315 """The interface for a Scheduler.
580 """The interface for a Scheduler.
316 """
581 """
317 zi.Attribute("nworkers", "the number of unassigned workers")
582 zi.Attribute("nworkers", "the number of unassigned workers")
318 zi.Attribute("ntasks", "the number of unscheduled tasks")
583 zi.Attribute("ntasks", "the number of unscheduled tasks")
319 zi.Attribute("workerids", "a list of the worker ids")
584 zi.Attribute("workerids", "a list of the worker ids")
320 zi.Attribute("taskids", "a list of the task ids")
585 zi.Attribute("taskids", "a list of the task ids")
321
586
322 def add_task(task, **flags):
587 def add_task(task, **flags):
323 """Add a task to the queue of the Scheduler.
588 """Add a task to the queue of the Scheduler.
324
589
325 :Parameters:
590 :Parameters:
326 task : a `Task` object
591 task : an `ITask` implementer
327 The task to be queued.
592 The task to be queued.
328 flags : dict
593 flags : dict
329 General keywords for more sophisticated scheduling
594 General keywords for more sophisticated scheduling
330 """
595 """
331
596
332 def pop_task(id=None):
597 def pop_task(id=None):
333 """Pops a Task object.
598 """Pops a task object from the queue.
334
599
335 This gets the next task to be run. If no `id` is requested, the highest priority
600 This gets the next task to be run. If no `id` is requested, the highest priority
336 task is returned.
601 task is returned.
337
602
338 :Parameters:
603 :Parameters:
339 id
604 id
340 The id of the task to be popped. The default (None) is to return
605 The id of the task to be popped. The default (None) is to return
341 the highest priority task.
606 the highest priority task.
342
607
343 :Returns: a `Task` object
608 :Returns: an `ITask` implementer
344
609
345 :Exceptions:
610 :Exceptions:
346 IndexError : raised if no taskid in queue
611 IndexError : raised if no taskid in queue
347 """
612 """
348
613
349 def add_worker(worker, **flags):
614 def add_worker(worker, **flags):
350 """Add a worker to the worker queue.
615 """Add a worker to the worker queue.
351
616
352 :Parameters:
617 :Parameters:
353 worker : an IWorker implementing object
618 worker : an `IWorker` implementer
354 flags : General keywords for more sophisticated scheduling
619 flags : dict
620 General keywords for more sophisticated scheduling
355 """
621 """
356
622
357 def pop_worker(id=None):
623 def pop_worker(id=None):
358 """Pops an IWorker object that is ready to do work.
624 """Pops an IWorker object that is ready to do work.
359
625
360 This gets the next IWorker that is ready to do work.
626 This gets the next IWorker that is ready to do work.
361
627
362 :Parameters:
628 :Parameters:
363 id : if specified, will pop worker with workerid=id, else pops
629 id : if specified, will pop worker with workerid=id, else pops
364 highest priority worker. Defaults to None.
630 highest priority worker. Defaults to None.
365
631
366 :Returns:
632 :Returns:
367 an IWorker object
633 an IWorker object
368
634
369 :Exceptions:
635 :Exceptions:
370 IndexError : raised if no workerid in queue
636 IndexError : raised if no workerid in queue
371 """
637 """
372
638
373 def ready():
639 def ready():
374 """Returns True if there is something to do, False otherwise"""
640 """Returns True if there is something to do, False otherwise"""
375
641
376 def schedule():
642 def schedule():
377 """Returns a tuple of the worker and task pair for the next
643 """Returns (worker,task) pair for the next task to be run."""
378 task to be run.
379 """
380
644
381
645
382 class FIFOScheduler(object):
646 class FIFOScheduler(object):
383 """A basic First-In-First-Out (Queue) Scheduler.
647 """
384 This is the default Scheduler for the TaskController.
648 A basic First-In-First-Out (Queue) Scheduler.
385 See the docstrings for IScheduler for interface details.
649
650 This is the default Scheduler for the `TaskController`.
651 See the docstrings for `IScheduler` for interface details.
386 """
652 """
387
653
388 zi.implements(IScheduler)
654 zi.implements(IScheduler)
389
655
390 def __init__(self):
656 def __init__(self):
391 self.tasks = []
657 self.tasks = []
392 self.workers = []
658 self.workers = []
393
659
394 def _ntasks(self):
660 def _ntasks(self):
395 return len(self.tasks)
661 return len(self.tasks)
396
662
397 def _nworkers(self):
663 def _nworkers(self):
398 return len(self.workers)
664 return len(self.workers)
399
665
400 ntasks = property(_ntasks, lambda self, _:None)
666 ntasks = property(_ntasks, lambda self, _:None)
401 nworkers = property(_nworkers, lambda self, _:None)
667 nworkers = property(_nworkers, lambda self, _:None)
402
668
403 def _taskids(self):
669 def _taskids(self):
404 return [t.taskid for t in self.tasks]
670 return [t.taskid for t in self.tasks]
405
671
406 def _workerids(self):
672 def _workerids(self):
407 return [w.workerid for w in self.workers]
673 return [w.workerid for w in self.workers]
408
674
409 taskids = property(_taskids, lambda self,_:None)
675 taskids = property(_taskids, lambda self,_:None)
410 workerids = property(_workerids, lambda self,_:None)
676 workerids = property(_workerids, lambda self,_:None)
411
677
412 def add_task(self, task, **flags):
678 def add_task(self, task, **flags):
413 self.tasks.append(task)
679 self.tasks.append(task)
414
680
415 def pop_task(self, id=None):
681 def pop_task(self, id=None):
416 if id is None:
682 if id is None:
417 return self.tasks.pop(0)
683 return self.tasks.pop(0)
418 else:
684 else:
419 for i in range(len(self.tasks)):
685 for i in range(len(self.tasks)):
420 taskid = self.tasks[i].taskid
686 taskid = self.tasks[i].taskid
421 if id == taskid:
687 if id == taskid:
422 return self.tasks.pop(i)
688 return self.tasks.pop(i)
423 raise IndexError("No task #%i"%id)
689 raise IndexError("No task #%i"%id)
424
690
425 def add_worker(self, worker, **flags):
691 def add_worker(self, worker, **flags):
426 self.workers.append(worker)
692 self.workers.append(worker)
427
693
428 def pop_worker(self, id=None):
694 def pop_worker(self, id=None):
429 if id is None:
695 if id is None:
430 return self.workers.pop(0)
696 return self.workers.pop(0)
431 else:
697 else:
432 for i in range(len(self.workers)):
698 for i in range(len(self.workers)):
433 workerid = self.workers[i].workerid
699 workerid = self.workers[i].workerid
434 if id == workerid:
700 if id == workerid:
435 return self.workers.pop(i)
701 return self.workers.pop(i)
436 raise IndexError("No worker #%i"%id)
702 raise IndexError("No worker #%i"%id)
437
703
438 def schedule(self):
704 def schedule(self):
439 for t in self.tasks:
705 for t in self.tasks:
440 for w in self.workers:
706 for w in self.workers:
441 try:# do not allow exceptions to break this
707 try:# do not allow exceptions to break this
442 cando = t.depend is None or t.depend(w.properties)
708 # Allow the task to check itself using its
709 # check_depend method.
710 cando = t.check_depend(w.properties)
443 except:
711 except:
444 cando = False
712 cando = False
445 if cando:
713 if cando:
446 return self.pop_worker(w.workerid), self.pop_task(t.taskid)
714 return self.pop_worker(w.workerid), self.pop_task(t.taskid)
447 return None, None
715 return None, None
448
716
449
717
450
718
451 class LIFOScheduler(FIFOScheduler):
719 class LIFOScheduler(FIFOScheduler):
452 """A Last-In-First-Out (Stack) Scheduler. This scheduler should naively
720 """
453 reward fast engines by giving them more jobs. This risks starvation, but
721 A Last-In-First-Out (Stack) Scheduler.
454 only in cases with low load, where starvation does not really matter.
722
723 This scheduler should naively reward fast engines by giving
724 them more jobs. This risks starvation, but only in cases with
725 low load, where starvation does not really matter.
455 """
726 """
456
727
457 def add_task(self, task, **flags):
728 def add_task(self, task, **flags):
458 # self.tasks.reverse()
729 # self.tasks.reverse()
459 self.tasks.insert(0, task)
730 self.tasks.insert(0, task)
460 # self.tasks.reverse()
731 # self.tasks.reverse()
461
732
462 def add_worker(self, worker, **flags):
733 def add_worker(self, worker, **flags):
463 # self.workers.reverse()
734 # self.workers.reverse()
464 self.workers.insert(0, worker)
735 self.workers.insert(0, worker)
465 # self.workers.reverse()
736 # self.workers.reverse()
466
737
467
738
468 class ITaskController(cs.IControllerBase):
739 class ITaskController(cs.IControllerBase):
469 """The Task based interface to a `ControllerService` object
740 """
741 The Task based interface to a `ControllerService` object
470
742
471 This adapts a `ControllerService` to the ITaskController interface.
743 This adapts a `ControllerService` to the ITaskController interface.
472 """
744 """
473
745
474 def run(task):
746 def run(task):
475 """Run a task.
747 """
748 Run a task.
476
749
477 :Parameters:
750 :Parameters:
478 task : an IPython `Task` object
751 task : an IPython `Task` object
479
752
480 :Returns: the integer ID of the task
753 :Returns: the integer ID of the task
481 """
754 """
482
755
483 def get_task_result(taskid, block=False):
756 def get_task_result(taskid, block=False):
484 """Get the result of a task by its ID.
757 """
758 Get the result of a task by its ID.
485
759
486 :Parameters:
760 :Parameters:
487 taskid : int
761 taskid : int
488 the id of the task whose result is requested
762 the id of the task whose result is requested
489
763
490 :Returns: `Deferred` to (taskid, actualResult) if the task is done, and None
764 :Returns: `Deferred` to the task result if the task is done, and None
491 if not.
765 if not.
492
766
493 :Exceptions:
767 :Exceptions:
494 actualResult will be an `IndexError` if no such task has been submitted
768 actualResult will be an `IndexError` if no such task has been submitted
495 """
769 """
496
770
497 def abort(taskid):
771 def abort(taskid):
498 """Remove a task from the queue if it has not been submitted.
772 """Remove a task from the queue if it has not been submitted.
499
773
500 If the task has already been submitted, wait for it to finish and discard
774 If the task has already been submitted, wait for it to finish and discard
501 results and prevent resubmission.
775 results and prevent resubmission.
502
776
503 :Parameters:
777 :Parameters:
504 taskid : the id of the task to be aborted
778 taskid : the id of the task to be aborted
505
779
506 :Returns:
780 :Returns:
507 `Deferred` to abort attempt completion. Will be None on success.
781 `Deferred` to abort attempt completion. Will be None on success.
508
782
509 :Exceptions:
783 :Exceptions:
510 deferred will fail with `IndexError` if no such task has been submitted
784 deferred will fail with `IndexError` if no such task has been submitted
511 or the task has already completed.
785 or the task has already completed.
512 """
786 """
513
787
514 def barrier(taskids):
788 def barrier(taskids):
515 """Block until the list of taskids are completed.
789 """
790 Block until the tasks in the list of taskids are completed.
516
791
517 Returns None on success.
792 Returns None on success.
518 """
793 """
519
794
520 def spin():
795 def spin():
521 """touch the scheduler, to resume scheduling without submitting
796 """
522 a task.
797 Touch the scheduler, to resume scheduling without submitting a task.
523 """
798 """
524
799
525 def queue_status(self, verbose=False):
800 def queue_status(verbose=False):
526 """Get a dictionary with the current state of the task queue.
801 """
802 Get a dictionary with the current state of the task queue.
527
803
528 If verbose is True, then return lists of taskids, otherwise,
804 If verbose is True, then return lists of taskids, otherwise,
529 return the number of tasks with each status.
805 return the number of tasks with each status.
530 """
806 """
531
807
808 def clear():
809 """
810 Clear all previously run tasks from the task controller.
811
812 This is needed because the task controller keeps all task results
813 in memory. This can be a problem if there are many completed
814 tasks. Users should call this periodically to clean out these
815 cached task results.
816 """
817
532
818
533 class TaskController(cs.ControllerAdapterBase):
819 class TaskController(cs.ControllerAdapterBase):
534 """The Task based interface to a Controller object.
820 """The Task based interface to a Controller object.
535
821
536 If you want to use a different scheduler, just subclass this and set
822 If you want to use a different scheduler, just subclass this and set
537 the `SchedulerClass` member to the *class* of your chosen scheduler.
823 the `SchedulerClass` member to the *class* of your chosen scheduler.
538 """
824 """
539
825
540 zi.implements(ITaskController)
826 zi.implements(ITaskController)
541 SchedulerClass = FIFOScheduler
827 SchedulerClass = FIFOScheduler
542
828
543 timeout = 30
829 timeout = 30
544
830
545 def __init__(self, controller):
831 def __init__(self, controller):
546 self.controller = controller
832 self.controller = controller
547 self.controller.on_register_engine_do(self.registerWorker, True)
833 self.controller.on_register_engine_do(self.registerWorker, True)
548 self.controller.on_unregister_engine_do(self.unregisterWorker, True)
834 self.controller.on_unregister_engine_do(self.unregisterWorker, True)
549 self.taskid = 0
835 self.taskid = 0
550 self.failurePenalty = 1 # the time in seconds to penalize
836 self.failurePenalty = 1 # the time in seconds to penalize
551 # a worker for failing a task
837 # a worker for failing a task
552 self.pendingTasks = {} # dict of {workerid:(taskid, task)}
838 self.pendingTasks = {} # dict of {workerid:(taskid, task)}
553 self.deferredResults = {} # dict of {taskid:deferred}
839 self.deferredResults = {} # dict of {taskid:deferred}
554 self.finishedResults = {} # dict of {taskid:actualResult}
840 self.finishedResults = {} # dict of {taskid:actualResult}
555 self.workers = {} # dict of {workerid:worker}
841 self.workers = {} # dict of {workerid:worker}
556 self.abortPending = [] # dict of {taskid:abortDeferred}
842 self.abortPending = [] # dict of {taskid:abortDeferred}
557 self.idleLater = None # delayed call object for timeout
843 self.idleLater = None # delayed call object for timeout
558 self.scheduler = self.SchedulerClass()
844 self.scheduler = self.SchedulerClass()
559
845
560 for id in self.controller.engines.keys():
846 for id in self.controller.engines.keys():
561 self.workers[id] = IWorker(self.controller.engines[id])
847 self.workers[id] = IWorker(self.controller.engines[id])
562 self.workers[id].workerid = id
848 self.workers[id].workerid = id
563 self.schedule.add_worker(self.workers[id])
849 self.schedule.add_worker(self.workers[id])
564
850
565 def registerWorker(self, id):
851 def registerWorker(self, id):
566 """Called by controller.register_engine."""
852 """Called by controller.register_engine."""
567 if self.workers.get(id):
853 if self.workers.get(id):
568 raise "We already have one! This should not happen."
854 raise ValueError("worker with id %s already exists. This should not happen." % id)
569 self.workers[id] = IWorker(self.controller.engines[id])
855 self.workers[id] = IWorker(self.controller.engines[id])
570 self.workers[id].workerid = id
856 self.workers[id].workerid = id
571 if not self.pendingTasks.has_key(id):# if not working
857 if not self.pendingTasks.has_key(id):# if not working
572 self.scheduler.add_worker(self.workers[id])
858 self.scheduler.add_worker(self.workers[id])
573 self.distributeTasks()
859 self.distributeTasks()
574
860
575 def unregisterWorker(self, id):
861 def unregisterWorker(self, id):
576 """Called by controller.unregister_engine"""
862 """Called by controller.unregister_engine"""
577
863
578 if self.workers.has_key(id):
864 if self.workers.has_key(id):
579 try:
865 try:
580 self.scheduler.pop_worker(id)
866 self.scheduler.pop_worker(id)
581 except IndexError:
867 except IndexError:
582 pass
868 pass
583 self.workers.pop(id)
869 self.workers.pop(id)
584
870
585 def _pendingTaskIDs(self):
871 def _pendingTaskIDs(self):
586 return [t.taskid for t in self.pendingTasks.values()]
872 return [t.taskid for t in self.pendingTasks.values()]
587
873
588 #---------------------------------------------------------------------------
874 #---------------------------------------------------------------------------
589 # Interface methods
875 # Interface methods
590 #---------------------------------------------------------------------------
876 #---------------------------------------------------------------------------
591
877
592 def run(self, task):
878 def run(self, task):
593 """Run a task and return `Deferred` to its taskid."""
879 """
880 Run a task and return `Deferred` to its taskid.
881 """
594 task.taskid = self.taskid
882 task.taskid = self.taskid
595 task.start = time.localtime()
883 task.start = time.localtime()
596 self.taskid += 1
884 self.taskid += 1
597 d = defer.Deferred()
885 d = defer.Deferred()
598 self.scheduler.add_task(task)
886 self.scheduler.add_task(task)
599 # log.msg('Queuing task: %i' % task.taskid)
887 log.msg('Queuing task: %i' % task.taskid)
600
888
601 self.deferredResults[task.taskid] = []
889 self.deferredResults[task.taskid] = []
602 self.distributeTasks()
890 self.distributeTasks()
603 return defer.succeed(task.taskid)
891 return defer.succeed(task.taskid)
604
892
605 def get_task_result(self, taskid, block=False):
893 def get_task_result(self, taskid, block=False):
606 """Returns a `Deferred` to a TaskResult tuple or None."""
894 """
607 # log.msg("Getting task result: %i" % taskid)
895 Returns a `Deferred` to the task result, or None.
896 """
897 log.msg("Getting task result: %i" % taskid)
608 if self.finishedResults.has_key(taskid):
898 if self.finishedResults.has_key(taskid):
609 tr = self.finishedResults[taskid]
899 tr = self.finishedResults[taskid]
610 return defer.succeed(tr)
900 return defer.succeed(tr)
611 elif self.deferredResults.has_key(taskid):
901 elif self.deferredResults.has_key(taskid):
612 if block:
902 if block:
613 d = defer.Deferred()
903 d = defer.Deferred()
614 self.deferredResults[taskid].append(d)
904 self.deferredResults[taskid].append(d)
615 return d
905 return d
616 else:
906 else:
617 return defer.succeed(None)
907 return defer.succeed(None)
618 else:
908 else:
619 return defer.fail(IndexError("task ID not registered: %r" % taskid))
909 return defer.fail(IndexError("task ID not registered: %r" % taskid))
620
910
621 def abort(self, taskid):
911 def abort(self, taskid):
622 """Remove a task from the queue if it has not been run already."""
912 """
913 Remove a task from the queue if it has not been run already.
914 """
623 if not isinstance(taskid, int):
915 if not isinstance(taskid, int):
624 return defer.fail(failure.Failure(TypeError("an integer task id expected: %r" % taskid)))
916 return defer.fail(failure.Failure(TypeError("an integer task id expected: %r" % taskid)))
625 try:
917 try:
626 self.scheduler.pop_task(taskid)
918 self.scheduler.pop_task(taskid)
627 except IndexError, e:
919 except IndexError, e:
628 if taskid in self.finishedResults.keys():
920 if taskid in self.finishedResults.keys():
629 d = defer.fail(IndexError("Task Already Completed"))
921 d = defer.fail(IndexError("Task Already Completed"))
630 elif taskid in self.abortPending:
922 elif taskid in self.abortPending:
631 d = defer.fail(IndexError("Task Already Aborted"))
923 d = defer.fail(IndexError("Task Already Aborted"))
632 elif taskid in self._pendingTaskIDs():# task is pending
924 elif taskid in self._pendingTaskIDs():# task is pending
633 self.abortPending.append(taskid)
925 self.abortPending.append(taskid)
634 d = defer.succeed(None)
926 d = defer.succeed(None)
635 else:
927 else:
636 d = defer.fail(e)
928 d = defer.fail(e)
637 else:
929 else:
638 d = defer.execute(self._doAbort, taskid)
930 d = defer.execute(self._doAbort, taskid)
639
931
640 return d
932 return d
641
933
642 def barrier(self, taskids):
934 def barrier(self, taskids):
643 dList = []
935 dList = []
644 if isinstance(taskids, int):
936 if isinstance(taskids, int):
645 taskids = [taskids]
937 taskids = [taskids]
646 for id in taskids:
938 for id in taskids:
647 d = self.get_task_result(id, block=True)
939 d = self.get_task_result(id, block=True)
648 dList.append(d)
940 dList.append(d)
649 d = DeferredList(dList, consumeErrors=1)
941 d = DeferredList(dList, consumeErrors=1)
650 d.addCallbacks(lambda r: None)
942 d.addCallbacks(lambda r: None)
651 return d
943 return d
652
944
653 def spin(self):
945 def spin(self):
654 return defer.succeed(self.distributeTasks())
946 return defer.succeed(self.distributeTasks())
655
947
656 def queue_status(self, verbose=False):
948 def queue_status(self, verbose=False):
657 pending = self._pendingTaskIDs()
949 pending = self._pendingTaskIDs()
658 failed = []
950 failed = []
659 succeeded = []
951 succeeded = []
660 for k,v in self.finishedResults.iteritems():
952 for k,v in self.finishedResults.iteritems():
661 if not isinstance(v, failure.Failure):
953 if not isinstance(v, failure.Failure):
662 if hasattr(v,'failure'):
954 if hasattr(v,'failure'):
663 if v.failure is None:
955 if v.failure is None:
664 succeeded.append(k)
956 succeeded.append(k)
665 else:
957 else:
666 failed.append(k)
958 failed.append(k)
667 scheduled = self.scheduler.taskids
959 scheduled = self.scheduler.taskids
668 if verbose:
960 if verbose:
669 result = dict(pending=pending, failed=failed,
961 result = dict(pending=pending, failed=failed,
670 succeeded=succeeded, scheduled=scheduled)
962 succeeded=succeeded, scheduled=scheduled)
671 else:
963 else:
672 result = dict(pending=len(pending),failed=len(failed),
964 result = dict(pending=len(pending),failed=len(failed),
673 succeeded=len(succeeded),scheduled=len(scheduled))
965 succeeded=len(succeeded),scheduled=len(scheduled))
674 return defer.succeed(result)
966 return defer.succeed(result)
675
967
676 #---------------------------------------------------------------------------
968 #---------------------------------------------------------------------------
677 # Queue methods
969 # Queue methods
678 #---------------------------------------------------------------------------
970 #---------------------------------------------------------------------------
679
971
680 def _doAbort(self, taskid):
972 def _doAbort(self, taskid):
681 """Helper function for aborting a pending task."""
973 """
682 # log.msg("Task aborted: %i" % taskid)
974 Helper function for aborting a pending task.
975 """
976 log.msg("Task aborted: %i" % taskid)
683 result = failure.Failure(error.TaskAborted())
977 result = failure.Failure(error.TaskAborted())
684 self._finishTask(taskid, result)
978 self._finishTask(taskid, result)
685 if taskid in self.abortPending:
979 if taskid in self.abortPending:
686 self.abortPending.remove(taskid)
980 self.abortPending.remove(taskid)
687
981
688 def _finishTask(self, taskid, result):
982 def _finishTask(self, taskid, result):
689 dlist = self.deferredResults.pop(taskid)
983 dlist = self.deferredResults.pop(taskid)
690 result.taskid = taskid # The TaskResult should save the taskid
984 # result.taskid = taskid # The TaskResult should save the taskid
691 self.finishedResults[taskid] = result
985 self.finishedResults[taskid] = result
692 for d in dlist:
986 for d in dlist:
693 d.callback(result)
987 d.callback(result)
694
988
695 def distributeTasks(self):
989 def distributeTasks(self):
696 """Distribute tasks while self.scheduler has things to do."""
990 """
697 # log.msg("distributing Tasks")
991 Distribute tasks while self.scheduler has things to do.
992 """
993 log.msg("distributing Tasks")
698 worker, task = self.scheduler.schedule()
994 worker, task = self.scheduler.schedule()
699 if not worker and not task:
995 if not worker and not task:
700 if self.idleLater and self.idleLater.called:# we are inside failIdle
996 if self.idleLater and self.idleLater.called:# we are inside failIdle
701 self.idleLater = None
997 self.idleLater = None
702 else:
998 else:
703 self.checkIdle()
999 self.checkIdle()
704 return False
1000 return False
705 # else something to do:
1001 # else something to do:
706 while worker and task:
1002 while worker and task:
707 # get worker and task
1003 # get worker and task
708 # add to pending
1004 # add to pending
709 self.pendingTasks[worker.workerid] = task
1005 self.pendingTasks[worker.workerid] = task
710 # run/link callbacks
1006 # run/link callbacks
711 d = worker.run(task)
1007 d = worker.run(task)
712 # log.msg("Running task %i on worker %i" %(task.taskid, worker.workerid))
1008 log.msg("Running task %i on worker %i" %(task.taskid, worker.workerid))
713 d.addBoth(self.taskCompleted, task.taskid, worker.workerid)
1009 d.addBoth(self.taskCompleted, task.taskid, worker.workerid)
714 worker, task = self.scheduler.schedule()
1010 worker, task = self.scheduler.schedule()
715 # check for idle timeout:
1011 # check for idle timeout:
716 self.checkIdle()
1012 self.checkIdle()
717 return True
1013 return True
718
1014
719 def checkIdle(self):
1015 def checkIdle(self):
720 if self.idleLater and not self.idleLater.called:
1016 if self.idleLater and not self.idleLater.called:
721 self.idleLater.cancel()
1017 self.idleLater.cancel()
722 if self.scheduler.ntasks and self.workers and \
1018 if self.scheduler.ntasks and self.workers and \
723 self.scheduler.nworkers == len(self.workers):
1019 self.scheduler.nworkers == len(self.workers):
724 self.idleLater = reactor.callLater(self.timeout, self.failIdle)
1020 self.idleLater = reactor.callLater(self.timeout, self.failIdle)
725 else:
1021 else:
726 self.idleLater = None
1022 self.idleLater = None
727
1023
728 def failIdle(self):
1024 def failIdle(self):
729 if not self.distributeTasks():
1025 if not self.distributeTasks():
730 while self.scheduler.ntasks:
1026 while self.scheduler.ntasks:
731 t = self.scheduler.pop_task()
1027 t = self.scheduler.pop_task()
732 msg = "task %i failed to execute due to unmet dependencies"%t.taskid
1028 msg = "task %i failed to execute due to unmet dependencies"%t.taskid
733 msg += " for %i seconds"%self.timeout
1029 msg += " for %i seconds"%self.timeout
734 # log.msg("Task aborted by timeout: %i" % t.taskid)
1030 log.msg("Task aborted by timeout: %i" % t.taskid)
735 f = failure.Failure(error.TaskTimeout(msg))
1031 f = failure.Failure(error.TaskTimeout(msg))
736 self._finishTask(t.taskid, f)
1032 self._finishTask(t.taskid, f)
737 self.idleLater = None
1033 self.idleLater = None
738
1034
739
1035
740 def taskCompleted(self, result, taskid, workerid):
1036 def taskCompleted(self, success_and_result, taskid, workerid):
741 """This is the err/callback for a completed task."""
1037 """This is the err/callback for a completed task."""
1038 success, result = success_and_result
742 try:
1039 try:
743 task = self.pendingTasks.pop(workerid)
1040 task = self.pendingTasks.pop(workerid)
744 except:
1041 except:
745 # this should not happen
1042 # this should not happen
746 log.msg("Tried to pop bad pending task %i from worker %i"%(taskid, workerid))
1043 log.msg("Tried to pop bad pending task %i from worker %i"%(taskid, workerid))
747 log.msg("Result: %r"%result)
1044 log.msg("Result: %r"%result)
748 log.msg("Pending tasks: %s"%self.pendingTasks)
1045 log.msg("Pending tasks: %s"%self.pendingTasks)
749 return
1046 return
750
1047
751 # Check if aborted while pending
1048 # Check if aborted while pending
752 aborted = False
1049 aborted = False
753 if taskid in self.abortPending:
1050 if taskid in self.abortPending:
754 self._doAbort(taskid)
1051 self._doAbort(taskid)
755 aborted = True
1052 aborted = True
756
1053
757 if not aborted:
1054 if not aborted:
758 if result.failure is not None and isinstance(result.failure, failure.Failure): # we failed
1055 if not success:
759 log.msg("Task %i failed on worker %i"% (taskid, workerid))
1056 log.msg("Task %i failed on worker %i"% (taskid, workerid))
760 if task.retries > 0: # resubmit
1057 if task.retries > 0: # resubmit
761 task.retries -= 1
1058 task.retries -= 1
762 self.scheduler.add_task(task)
1059 self.scheduler.add_task(task)
763 s = "Resubmitting task %i, %i retries remaining" %(taskid, task.retries)
1060 s = "Resubmitting task %i, %i retries remaining" %(taskid, task.retries)
764 log.msg(s)
1061 log.msg(s)
765 self.distributeTasks()
1062 self.distributeTasks()
766 elif isinstance(task.recovery_task, Task) and \
1063 elif isinstance(task.recovery_task, BaseTask) and \
767 task.recovery_task.retries > -1:
1064 task.recovery_task.retries > -1:
768 # retries = -1 is to prevent infinite recovery_task loop
1065 # retries = -1 is to prevent infinite recovery_task loop
769 task.retries = -1
1066 task.retries = -1
770 task.recovery_task.taskid = taskid
1067 task.recovery_task.taskid = taskid
771 task = task.recovery_task
1068 task = task.recovery_task
772 self.scheduler.add_task(task)
1069 self.scheduler.add_task(task)
773 s = "Recovering task %i, %i retries remaining" %(taskid, task.retries)
1070 s = "Recovering task %i, %i retries remaining" %(taskid, task.retries)
774 log.msg(s)
1071 log.msg(s)
775 self.distributeTasks()
1072 self.distributeTasks()
776 else: # done trying
1073 else: # done trying
777 self._finishTask(taskid, result)
1074 self._finishTask(taskid, result)
778 # wait a second before readmitting a worker that failed
1075 # wait a second before readmitting a worker that failed
779 # it may have died, and not yet been unregistered
1076 # it may have died, and not yet been unregistered
780 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
1077 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
781 else: # we succeeded
1078 else: # we succeeded
782 # log.msg("Task completed: %i"% taskid)
1079 log.msg("Task completed: %i"% taskid)
783 self._finishTask(taskid, result)
1080 self._finishTask(taskid, result)
784 self.readmitWorker(workerid)
1081 self.readmitWorker(workerid)
785 else:# we aborted the task
1082 else: # we aborted the task
786 if result.failure is not None and isinstance(result.failure, failure.Failure): # it failed, penalize worker
1083 if not success:
787 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
1084 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
788 else:
1085 else:
789 self.readmitWorker(workerid)
1086 self.readmitWorker(workerid)
790
1087
791 def readmitWorker(self, workerid):
1088 def readmitWorker(self, workerid):
792 """Readmit a worker to the scheduler.
1089 """
1090 Readmit a worker to the scheduler.
793
1091
794 This is outside `taskCompleted` because of the `failurePenalty` being
1092 This is outside `taskCompleted` because of the `failurePenalty` being
795 implemented through `reactor.callLater`.
1093 implemented through `reactor.callLater`.
796 """
1094 """
797
1095
798 if workerid in self.workers.keys() and workerid not in self.pendingTasks.keys():
1096 if workerid in self.workers.keys() and workerid not in self.pendingTasks.keys():
799 self.scheduler.add_worker(self.workers[workerid])
1097 self.scheduler.add_worker(self.workers[workerid])
800 self.distributeTasks()
1098 self.distributeTasks()
1099
1100 def clear(self):
1101 """
1102 Clear all previously run tasks from the task controller.
1103
1104 This is needed because the task controller keeps all task results
1105 in memory. This can be a problem if there are many completed
1106 tasks. Users should call this periodically to clean out these
1107 cached task results.
1108 """
1109 self.finishedResults = {}
1110 return defer.succeed(None)
801
1111
802
1112
803 components.registerAdapter(TaskController, cs.IControllerBase, ITaskController)
1113 components.registerAdapter(TaskController, cs.IControllerBase, ITaskController)
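
A minimal usage sketch of the new `MapTask` defined in the hunk above. It assumes a connected, blocking task client `tc` that exposes the `run` and `get_task_result` methods described by `ITaskController`; how such a client is obtained (for example `IPython.kernel.client.TaskClient()`) and its blocking behaviour are assumptions here, not shown in this changeset, and a running controller with engines is required.

# Sketch only: `tc` is assumed to be a blocking task client exposing
# run()/get_task_result() as described by ITaskController above.
from IPython.kernel.task import MapTask

def mul(a, b):
    # MapTask requires a plain FunctionType; it is canned (pickled) for transport.
    return a * b

# Wraps the call mul(3, b=4); args must be a list/tuple and kwargs a dict.
t = MapTask(mul, args=(3,), kwargs=dict(b=4), retries=1)

task_id = tc.run(t)                              # returns the integer task id
result = tc.get_task_result(task_id, block=True)
# For a MapTask the task result is the raw return value (12 here),
# or a Failure if the remote call raised.
print result
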
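A similar sketch for `StringTask` and the `TaskResult` it produces, under the same assumption of a connected blocking task client `tc`; the 'numpy' worker property used in the dependency function is purely illustrative.

# Sketch only: same assumptions about `tc` as in the MapTask example.
from IPython.kernel.task import StringTask

def has_numpy(properties):
    # A depend function receives the worker's properties dict and returns a
    # boolean; the 'numpy' key is just an example property an engine might set.
    return properties.get('numpy', False)

t = StringTask('c = a * b',            # expression executed in the engine namespace
               push=dict(a=3, b=4),    # variables pushed before execution
               pull=['a', 'c'],        # variables pulled back as the result
               depend=has_numpy,       # only run on workers whose properties match
               retries=1)

task_id = tc.run(t)
tr = tc.get_task_result(task_id, block=True)   # a TaskResult instance

tr.raise_exception()    # re-raises any remote exception locally; no-op on success
print tr.ns.c           # pulled variables are attributes of tr.ns ...
print tr['a']           # ... and can also be read by indexing
print tr.engineid, tr.duration

As described in `taskCompleted` above, a failed task is resubmitted until its retries are exhausted, after which its recovery_task (if any) is scheduled in its place.
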
@@ -1,161 +1,180 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_taskcontrollerxmlrpc -*-
2 # -*- test-case-name: IPython.kernel.tests.test_taskcontrollerxmlrpc -*-
3
3
4 """The Generic Task Client object.
4 """
5
5 A blocking version of the task client.
6 This must be subclassed based on your connection method.
7 """
6 """
8
7
9 __docformat__ = "restructuredtext en"
8 __docformat__ = "restructuredtext en"
10
9
11 #-------------------------------------------------------------------------------
10 #-------------------------------------------------------------------------------
12 # Copyright (C) 2008 The IPython Development Team
11 # Copyright (C) 2008 The IPython Development Team
13 #
12 #
14 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
16 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
17
16
18 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
19 # Imports
18 # Imports
20 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
21
20
22 from zope.interface import Interface, implements
21 from zope.interface import Interface, implements
23 from twisted.python import components, log
22 from twisted.python import components, log
24
23
25 from IPython.kernel.twistedutil import blockingCallFromThread
24 from IPython.kernel.twistedutil import blockingCallFromThread
26 from IPython.kernel import task, error
25 from IPython.kernel import task, error
26 from IPython.kernel.mapper import (
27 SynchronousTaskMapper,
28 ITaskMapperFactory,
29 IMapper
30 )
31 from IPython.kernel.parallelfunction import (
32 ParallelFunction,
33 ITaskParallelDecorator
34 )
27
35
28 #-------------------------------------------------------------------------------
36 #-------------------------------------------------------------------------------
29 # Connecting Task Client
37 # The task client
30 #-------------------------------------------------------------------------------
38 #-------------------------------------------------------------------------------
31
39
32 class InteractiveTaskClient(object):
33
34 def irun(self, *args, **kwargs):
35 """Run a task on the `TaskController`.
36
37 This method is a shorthand for run(task) and its arguments are simply
38 passed onto a `Task` object:
39
40 irun(*args, **kwargs) -> run(Task(*args, **kwargs))
41
42 :Parameters:
43 expression : str
44 A str that is valid python code that is the task.
45 pull : str or list of str
46 The names of objects to be pulled as results.
47 push : dict
48 A dict of objects to be pushed into the engines namespace before
49 execution of the expression.
50 clear_before : boolean
51 Should the engine's namespace be cleared before the task is run.
52 Default=False.
53 clear_after : boolean
54 Should the engine's namespace be cleared after the task is run.
55 Default=False.
56 retries : int
57 The number of times to resumbit the task if it fails. Default=0.
58 options : dict
59 Any other keyword options for more elaborate uses of tasks
60
61 :Returns: A `TaskResult` object.
62 """
63 block = kwargs.pop('block', False)
64 if len(args) == 1 and isinstance(args[0], task.Task):
65 t = args[0]
66 else:
67 t = task.Task(*args, **kwargs)
68 taskid = self.run(t)
69 print "TaskID = %i"%taskid
70 if block:
71 return self.get_task_result(taskid, block)
72 else:
73 return taskid
74
75 class IBlockingTaskClient(Interface):
40 class IBlockingTaskClient(Interface):
76 """
41 """
77 An interface for blocking task clients.
42 A marker interface for the blocking task client
78 """
43 """
79 pass
44 pass
80
45
81
46 class BlockingTaskClient(object):
82 class BlockingTaskClient(InteractiveTaskClient):
83 """
47 """
84 This class provides a blocking task client.
48 A blocking task client that adapts a non-blocking one.
85 """
49 """
86
50
87 implements(IBlockingTaskClient)
51 implements(
52 IBlockingTaskClient,
53 ITaskMapperFactory,
54 IMapper,
55 ITaskParallelDecorator
56 )
88
57
89 def __init__(self, task_controller):
58 def __init__(self, task_controller):
90 self.task_controller = task_controller
59 self.task_controller = task_controller
91 self.block = True
60 self.block = True
92
61
93 def run(self, task):
62 def run(self, task, block=False):
94 """
63 """Run a task on the `TaskController`.
95 Run a task and return a task id that can be used to get the task result.
64
65 See the documentation of the `MapTask` and `StringTask` classes for
66 details on how to build tasks of different types.
96
67
97 :Parameters:
68 :Parameters:
98 task : `Task`
69 task : an `ITask` implementer
99 The `Task` object to run
70
71 :Returns: The int taskid of the submitted task. Pass this to
72 `get_task_result` to get the `TaskResult` object.
100 """
73 """
101 return blockingCallFromThread(self.task_controller.run, task)
74 tid = blockingCallFromThread(self.task_controller.run, task)
75 if block:
76 return self.get_task_result(tid, block=True)
77 else:
78 return tid
102
79
103 def get_task_result(self, taskid, block=False):
80 def get_task_result(self, taskid, block=False):
104 """
81 """
105 Get or poll for a task result.
82 Get a task result by taskid.
106
83
107 :Parameters:
84 :Parameters:
108 taskid : int
85 taskid : int
109 The id of the task whose result to get
86 The taskid of the task to be retrieved.
110 block : boolean
87 block : boolean
111 If True, wait until the task is done and then result the
88 Should I block until the task is done?
112 `TaskResult` object. If False, just poll for the result and
89
113 return None if the task is not done.
90 :Returns: A `TaskResult` object that encapsulates the task result.
114 """
91 """
115 return blockingCallFromThread(self.task_controller.get_task_result,
92 return blockingCallFromThread(self.task_controller.get_task_result,
116 taskid, block)
93 taskid, block)
117
94
118 def abort(self, taskid):
95 def abort(self, taskid):
119 """
96 """
120 Abort a task by task id if it has not been started.
97 Abort a task by taskid.
98
99 :Parameters:
100 taskid : int
101 The taskid of the task to be aborted.
121 """
102 """
122 return blockingCallFromThread(self.task_controller.abort, taskid)
103 return blockingCallFromThread(self.task_controller.abort, taskid)
123
104
124 def barrier(self, taskids):
105 def barrier(self, taskids):
125 """
106 """Block until a set of tasks are completed.
126 Wait for a set of tasks to finish.
127
107
128 :Parameters:
108 :Parameters:
129 taskids : list of ints
109 taskids : list, tuple
130 A list of task ids to wait for.
110 A sequence of taskids to block on.
131 """
111 """
132 return blockingCallFromThread(self.task_controller.barrier, taskids)
112 return blockingCallFromThread(self.task_controller.barrier, taskids)
133
113
134 def spin(self):
114 def spin(self):
135 """
115 """
136 Cause the scheduler to schedule tasks.
116 Touch the scheduler, to resume scheduling without submitting a task.
137
117
138 This method only needs to be called in unusual situations where the
118 This method only needs to be called in unusual situations where the
139 scheduler is idle for some reason.
119 scheduler is idle for some reason.
140 """
120 """
141 return blockingCallFromThread(self.task_controller.spin)
121 return blockingCallFromThread(self.task_controller.spin)
142
122
143 def queue_status(self, verbose=False):
123 def queue_status(self, verbose=False):
144 """
124 """
145 Get a dictionary with the current state of the task queue.
125 Get a dictionary with the current state of the task queue.
146
126
147 :Parameters:
127 :Parameters:
148 verbose : boolean
128 verbose : boolean
149 If True, return a list of taskids. If False, simply give
129 If True, return a list of taskids. If False, simply give
150 the number of tasks with each status.
130 the number of tasks with each status.
151
131
152 :Returns:
132 :Returns:
153 A dict with the queue status.
133 A dict with the queue status.
154 """
134 """
155 return blockingCallFromThread(self.task_controller.queue_status, verbose)
135 return blockingCallFromThread(self.task_controller.queue_status, verbose)
136
137 def clear(self):
138 """
139 Clear all previously run tasks from the task controller.
140
141 This is needed because the task controller keeps all task results
142 in memory. This can be a problem if there are many completed
143 tasks. Users should call this periodically to clean out these
144 cached task results.
145 """
146 return blockingCallFromThread(self.task_controller.clear)
147
148 def map(self, func, *sequences):
149 """
150 Apply func to *sequences elementwise. Like Python's builtin map.
151
152 This version is load balanced.
153 """
154 return self.mapper().map(func, *sequences)
156
155
156 def mapper(self, clear_before=False, clear_after=False, retries=0,
157 recovery_task=None, depend=None, block=True):
158 """
159 Create an `IMapper` implementer with a given set of arguments.
160
161 The `IMapper` created using a task controller is load balanced.
162
163 See the documentation for `IPython.kernel.task.BaseTask` for
164 documentation on the arguments to this method.
165 """
166 return SynchronousTaskMapper(self, clear_before=clear_before,
167 clear_after=clear_after, retries=retries,
168 recovery_task=recovery_task, depend=depend, block=block)
169
170 def parallel(self, clear_before=False, clear_after=False, retries=0,
171 recovery_task=None, depend=None, block=True):
172 mapper = self.mapper(clear_before, clear_after, retries,
173 recovery_task, depend, block)
174 pf = ParallelFunction(mapper)
175 return pf
157
176
158 components.registerAdapter(BlockingTaskClient,
177 components.registerAdapter(BlockingTaskClient,
159 task.ITaskController, IBlockingTaskClient)
178 task.ITaskController, IBlockingTaskClient)
160
179
161
180
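The mapper support added to the blocking client above can be exercised as in the sketch below. It assumes `tc` is an already connected blocking task client (however that is obtained in a given deployment); the function and argument values are purely illustrative.

    def square(x):
        return x * x

    # Load-balanced, blocking map over the engines; the result is equivalent
    # to the builtin map(square, range(16)).
    results = tc.map(square, range(16))

    # The same thing with explicit task options, which are passed through to
    # the tasks created by the mapper.
    m = tc.mapper(retries=2, clear_after=True)
    results = m.map(square, range(16))

    # Drop cached results on the controller once they are no longer needed.
    tc.clear()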
@@ -1,267 +1,329 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_taskxmlrpc -*-
2 # -*- test-case-name: IPython.kernel.tests.test_taskxmlrpc -*-
3 """A Foolscap interface to a TaskController.
3 """A Foolscap interface to a TaskController.
4
4
5 This class lets Foolscap clients talk to a TaskController.
5 This class lets Foolscap clients talk to a TaskController.
6 """
6 """
7
7
8 __docformat__ = "restructuredtext en"
8 __docformat__ = "restructuredtext en"
9
9
10 #-------------------------------------------------------------------------------
10 #-------------------------------------------------------------------------------
11 # Copyright (C) 2008 The IPython Development Team
11 # Copyright (C) 2008 The IPython Development Team
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16
16
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18 # Imports
18 # Imports
19 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
20
20
21 import cPickle as pickle
21 import cPickle as pickle
22 import xmlrpclib, copy
22 import xmlrpclib, copy
23
23
24 from zope.interface import Interface, implements
24 from zope.interface import Interface, implements
25 from twisted.internet import defer
25 from twisted.internet import defer
26 from twisted.python import components, failure
26 from twisted.python import components, failure
27
27
28 from foolscap import Referenceable
28 from foolscap import Referenceable
29
29
30 from IPython.kernel.twistedutil import blockingCallFromThread
30 from IPython.kernel.twistedutil import blockingCallFromThread
31 from IPython.kernel import error, task as taskmodule, taskclient
31 from IPython.kernel import error, task as taskmodule, taskclient
32 from IPython.kernel.pickleutil import can, uncan
32 from IPython.kernel.pickleutil import can, uncan
33 from IPython.kernel.clientinterfaces import (
33 from IPython.kernel.clientinterfaces import (
34 IFCClientInterfaceProvider,
34 IFCClientInterfaceProvider,
35 IBlockingClientAdaptor
35 IBlockingClientAdaptor
36 )
36 )
37 from IPython.kernel.mapper import (
38 TaskMapper,
39 ITaskMapperFactory,
40 IMapper
41 )
42 from IPython.kernel.parallelfunction import (
43 ParallelFunction,
44 ITaskParallelDecorator
45 )
37
46
38 #-------------------------------------------------------------------------------
47 #-------------------------------------------------------------------------------
39 # The Controller side of things
48 # The Controller side of things
40 #-------------------------------------------------------------------------------
49 #-------------------------------------------------------------------------------
41
50
42
51
43 class IFCTaskController(Interface):
52 class IFCTaskController(Interface):
44 """Foolscap interface to task controller.
53 """Foolscap interface to task controller.
45
54
46 See the documentation of ITaskController for documentation about the methods.
55 See the documentation of `ITaskController` for more information.
47 """
56 """
48 def remote_run(request, binTask):
57 def remote_run(binTask):
49 """"""
58 """"""
50
59
51 def remote_abort(request, taskid):
60 def remote_abort(taskid):
52 """"""
61 """"""
53
62
54 def remote_get_task_result(request, taskid, block=False):
63 def remote_get_task_result(taskid, block=False):
55 """"""
64 """"""
56
65
57 def remote_barrier(request, taskids):
66 def remote_barrier(taskids):
67 """"""
68
69 def remote_spin():
58 """"""
70 """"""
59
71
60 def remote_spin(request):
72 def remote_queue_status(verbose):
61 """"""
73 """"""
62
74
63 def remote_queue_status(request, verbose):
75 def remote_clear():
64 """"""
76 """"""
65
77
66
78
67 class FCTaskControllerFromTaskController(Referenceable):
79 class FCTaskControllerFromTaskController(Referenceable):
68 """XML-RPC attachmeot for controller.
69
70 See IXMLRPCTaskController and ITaskController (and its children) for documentation.
71 """
80 """
81 Adapt a `TaskController` to an `IFCTaskController`
82
83 This class is used to expose a `TaskController` over the wire using
84 the Foolscap network protocol.
85 """
86
72 implements(IFCTaskController, IFCClientInterfaceProvider)
87 implements(IFCTaskController, IFCClientInterfaceProvider)
73
88
74 def __init__(self, taskController):
89 def __init__(self, taskController):
75 self.taskController = taskController
90 self.taskController = taskController
76
91
77 #---------------------------------------------------------------------------
92 #---------------------------------------------------------------------------
78 # Non interface methods
93 # Non interface methods
79 #---------------------------------------------------------------------------
94 #---------------------------------------------------------------------------
80
95
81 def packageFailure(self, f):
96 def packageFailure(self, f):
82 f.cleanFailure()
97 f.cleanFailure()
83 return self.packageSuccess(f)
98 return self.packageSuccess(f)
84
99
85 def packageSuccess(self, obj):
100 def packageSuccess(self, obj):
86 serial = pickle.dumps(obj, 2)
101 serial = pickle.dumps(obj, 2)
87 return serial
102 return serial
88
103
89 #---------------------------------------------------------------------------
104 #---------------------------------------------------------------------------
90 # ITaskController related methods
105 # ITaskController related methods
91 #---------------------------------------------------------------------------
106 #---------------------------------------------------------------------------
92
107
93 def remote_run(self, ptask):
108 def remote_run(self, ptask):
94 try:
109 try:
95 ctask = pickle.loads(ptask)
110 task = pickle.loads(ptask)
96 task = taskmodule.uncanTask(ctask)
111 task.uncan_task()
97 except:
112 except:
98 d = defer.fail(pickle.UnpickleableError("Could not unmarshal task"))
113 d = defer.fail(pickle.UnpickleableError("Could not unmarshal task"))
99 else:
114 else:
100 d = self.taskController.run(task)
115 d = self.taskController.run(task)
101 d.addCallback(self.packageSuccess)
116 d.addCallback(self.packageSuccess)
102 d.addErrback(self.packageFailure)
117 d.addErrback(self.packageFailure)
103 return d
118 return d
104
119
105 def remote_abort(self, taskid):
120 def remote_abort(self, taskid):
106 d = self.taskController.abort(taskid)
121 d = self.taskController.abort(taskid)
107 d.addCallback(self.packageSuccess)
122 d.addCallback(self.packageSuccess)
108 d.addErrback(self.packageFailure)
123 d.addErrback(self.packageFailure)
109 return d
124 return d
110
125
111 def remote_get_task_result(self, taskid, block=False):
126 def remote_get_task_result(self, taskid, block=False):
112 d = self.taskController.get_task_result(taskid, block)
127 d = self.taskController.get_task_result(taskid, block)
113 d.addCallback(self.packageSuccess)
128 d.addCallback(self.packageSuccess)
114 d.addErrback(self.packageFailure)
129 d.addErrback(self.packageFailure)
115 return d
130 return d
116
131
117 def remote_barrier(self, taskids):
132 def remote_barrier(self, taskids):
118 d = self.taskController.barrier(taskids)
133 d = self.taskController.barrier(taskids)
119 d.addCallback(self.packageSuccess)
134 d.addCallback(self.packageSuccess)
120 d.addErrback(self.packageFailure)
135 d.addErrback(self.packageFailure)
121 return d
136 return d
122
137
123 def remote_spin(self):
138 def remote_spin(self):
124 d = self.taskController.spin()
139 d = self.taskController.spin()
125 d.addCallback(self.packageSuccess)
140 d.addCallback(self.packageSuccess)
126 d.addErrback(self.packageFailure)
141 d.addErrback(self.packageFailure)
127 return d
142 return d
128
143
129 def remote_queue_status(self, verbose):
144 def remote_queue_status(self, verbose):
130 d = self.taskController.queue_status(verbose)
145 d = self.taskController.queue_status(verbose)
131 d.addCallback(self.packageSuccess)
146 d.addCallback(self.packageSuccess)
132 d.addErrback(self.packageFailure)
147 d.addErrback(self.packageFailure)
133 return d
148 return d
134
149
150 def remote_clear(self):
151 return self.taskController.clear()
152
135 def remote_get_client_name(self):
153 def remote_get_client_name(self):
136 return 'IPython.kernel.taskfc.FCTaskClient'
154 return 'IPython.kernel.taskfc.FCTaskClient'
137
155
138 components.registerAdapter(FCTaskControllerFromTaskController,
156 components.registerAdapter(FCTaskControllerFromTaskController,
139 taskmodule.ITaskController, IFCTaskController)
157 taskmodule.ITaskController, IFCTaskController)
140
158
141
159
142 #-------------------------------------------------------------------------------
160 #-------------------------------------------------------------------------------
143 # The Client side of things
161 # The Client side of things
144 #-------------------------------------------------------------------------------
162 #-------------------------------------------------------------------------------
145
163
146 class FCTaskClient(object):
164 class FCTaskClient(object):
147 """XML-RPC based TaskController client that implements ITaskController.
148
149 :Parameters:
150 addr : (ip, port)
151 The ip (str) and port (int) tuple of the `TaskController`.
152 """
165 """
153 implements(taskmodule.ITaskController, IBlockingClientAdaptor)
166 Client class for a Foolscap-exposed `TaskController`.
167
168 This class is an adapter that makes a `RemoteReference` to a
169 `TaskController` look like an actual `ITaskController` on the client side.
170
171 This class also implements `IBlockingClientAdaptor` so that clients can
172 automatically get a blocking version of this class.
173 """
174
175 implements(
176 taskmodule.ITaskController,
177 IBlockingClientAdaptor,
178 ITaskMapperFactory,
179 IMapper,
180 ITaskParallelDecorator
181 )
154
182
155 def __init__(self, remote_reference):
183 def __init__(self, remote_reference):
156 self.remote_reference = remote_reference
184 self.remote_reference = remote_reference
157
185
158 #---------------------------------------------------------------------------
186 #---------------------------------------------------------------------------
159 # Non interface methods
187 # Non interface methods
160 #---------------------------------------------------------------------------
188 #---------------------------------------------------------------------------
161
189
162 def unpackage(self, r):
190 def unpackage(self, r):
163 return pickle.loads(r)
191 return pickle.loads(r)
164
192
165 #---------------------------------------------------------------------------
193 #---------------------------------------------------------------------------
166 # ITaskController related methods
194 # ITaskController related methods
167 #---------------------------------------------------------------------------
195 #---------------------------------------------------------------------------
168 def run(self, task):
196 def run(self, task):
169 """Run a task on the `TaskController`.
197 """Run a task on the `TaskController`.
170
198
171 :Parameters:
199 See the documentation of the `MapTask` and `StringTask` classes for
172 task : a `Task` object
200 details on how to build tasks of different types.
173
174 The Task object is created using the following signature:
175
176 Task(expression, pull=None, push={}, clear_before=False,
177 clear_after=False, retries=0, **options):)
178
201
179 The meaning of the arguments is as follows:
202 :Parameters:
203 task : an `ITask` implementer
180
204
181 :Task Parameters:
182 expression : str
183 A str that is valid python code that is the task.
184 pull : str or list of str
185 The names of objects to be pulled as results.
186 push : dict
187 A dict of objects to be pushed into the engines namespace before
188 execution of the expression.
189 clear_before : boolean
190 Should the engine's namespace be cleared before the task is run.
191 Default=False.
192 clear_after : boolean
193 Should the engine's namespace be cleared after the task is run.
194 Default=False.
195 retries : int
196 The number of times to resumbit the task if it fails. Default=0.
197 options : dict
198 Any other keyword options for more elaborate uses of tasks
199
200 :Returns: The int taskid of the submitted task. Pass this to
205 :Returns: The int taskid of the submitted task. Pass this to
201 `get_task_result` to get the `TaskResult` object.
206 `get_task_result` to get the `TaskResult` object.
202 """
207 """
203 assert isinstance(task, taskmodule.Task), "task must be a Task object!"
208 assert isinstance(task, taskmodule.BaseTask), "task must be a Task object!"
204 ctask = taskmodule.canTask(task) # handles arbitrary function in .depend
209 task.can_task()
205 # as well as arbitrary recovery_task chains
210 ptask = pickle.dumps(task, 2)
206 ptask = pickle.dumps(ctask, 2)
211 task.uncan_task()
207 d = self.remote_reference.callRemote('run', ptask)
212 d = self.remote_reference.callRemote('run', ptask)
208 d.addCallback(self.unpackage)
213 d.addCallback(self.unpackage)
209 return d
214 return d
210
215
211 def get_task_result(self, taskid, block=False):
216 def get_task_result(self, taskid, block=False):
212 """The task result by taskid.
217 """
218 Get a task result by taskid.
213
219
214 :Parameters:
220 :Parameters:
215 taskid : int
221 taskid : int
216 The taskid of the task to be retrieved.
222 The taskid of the task to be retrieved.
217 block : boolean
223 block : boolean
218 Should I block until the task is done?
224 Should I block until the task is done?
219
225
220 :Returns: A `TaskResult` object that encapsulates the task result.
226 :Returns: A `TaskResult` object that encapsulates the task result.
221 """
227 """
222 d = self.remote_reference.callRemote('get_task_result', taskid, block)
228 d = self.remote_reference.callRemote('get_task_result', taskid, block)
223 d.addCallback(self.unpackage)
229 d.addCallback(self.unpackage)
224 return d
230 return d
225
231
226 def abort(self, taskid):
232 def abort(self, taskid):
227 """Abort a task by taskid.
233 """
234 Abort a task by taskid.
228
235
229 :Parameters:
236 :Parameters:
230 taskid : int
237 taskid : int
231 The taskid of the task to be aborted.
238 The taskid of the task to be aborted.
232 block : boolean
233 Should I block until the task is aborted.
234 """
239 """
235 d = self.remote_reference.callRemote('abort', taskid)
240 d = self.remote_reference.callRemote('abort', taskid)
236 d.addCallback(self.unpackage)
241 d.addCallback(self.unpackage)
237 return d
242 return d
238
243
239 def barrier(self, taskids):
244 def barrier(self, taskids):
240 """Block until all tasks are completed.
245 """Block until a set of tasks are completed.
241
246
242 :Parameters:
247 :Parameters:
243 taskids : list, tuple
248 taskids : list, tuple
244 A sequence of taskids to block on.
249 A sequence of taskids to block on.
245 """
250 """
246 d = self.remote_reference.callRemote('barrier', taskids)
251 d = self.remote_reference.callRemote('barrier', taskids)
247 d.addCallback(self.unpackage)
252 d.addCallback(self.unpackage)
248 return d
253 return d
249
254
250 def spin(self):
255 def spin(self):
251 """touch the scheduler, to resume scheduling without submitting
256 """
252 a task.
257 Touch the scheduler, to resume scheduling without submitting a task.
258
259 This method only needs to be called in unusual situations where the
260 scheduler is idle for some reason.
253 """
261 """
254 d = self.remote_reference.callRemote('spin')
262 d = self.remote_reference.callRemote('spin')
255 d.addCallback(self.unpackage)
263 d.addCallback(self.unpackage)
256 return d
264 return d
257
265
258 def queue_status(self, verbose=False):
266 def queue_status(self, verbose=False):
259 """Return a dict with the status of the task queue."""
267 """
268 Get a dictionary with the current state of the task queue.
269
270 :Parameters:
271 verbose : boolean
272 If True, return a list of taskids. If False, simply give
273 the number of tasks with each status.
274
275 :Returns:
276 A dict with the queue status.
277 """
260 d = self.remote_reference.callRemote('queue_status', verbose)
278 d = self.remote_reference.callRemote('queue_status', verbose)
261 d.addCallback(self.unpackage)
279 d.addCallback(self.unpackage)
262 return d
280 return d
263
281
282 def clear(self):
283 """
284 Clear all previously run tasks from the task controller.
285
286 This is needed because the task controller keeps all task results
287 in memory. This can be a problem if there are many completed
288 tasks. Users should call this periodically to clean out these
289 cached task results.
290 """
291 d = self.remote_reference.callRemote('clear')
292 return d
293
264 def adapt_to_blocking_client(self):
294 def adapt_to_blocking_client(self):
295 """
296 Wrap self in a blocking version that implements `IBlockingTaskClient`.
297 """
265 from IPython.kernel.taskclient import IBlockingTaskClient
298 from IPython.kernel.taskclient import IBlockingTaskClient
266 return IBlockingTaskClient(self)
299 return IBlockingTaskClient(self)
300
301 def map(self, func, *sequences):
302 """
303 Apply func to *sequences elementwise. Like Python's builtin map.
304
305 This version is load balanced.
306 """
307 return self.mapper().map(func, *sequences)
308
309 def mapper(self, clear_before=False, clear_after=False, retries=0,
310 recovery_task=None, depend=None, block=True):
311 """
312 Create an `IMapper` implementer with a given set of arguments.
313
314 The `IMapper` created using a task controller is load balanced.
315
316 See the documentation for `IPython.kernel.task.BaseTask` for
317 documentation on the arguments to this method.
318 """
319 return TaskMapper(self, clear_before=clear_before,
320 clear_after=clear_after, retries=retries,
321 recovery_task=recovery_task, depend=depend, block=block)
322
323 def parallel(self, clear_before=False, clear_after=False, retries=0,
324 recovery_task=None, depend=None, block=True):
325 mapper = self.mapper(clear_before, clear_after, retries,
326 recovery_task, depend, block)
327 pf = ParallelFunction(mapper)
328 return pf
267
329
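Unlike the blocking client, `FCTaskClient` returns Twisted Deferreds, so results are consumed through callbacks. The sketch below assumes `tc` is a connected `FCTaskClient` and `t` is an `ITask` implementer (for example a `MapTask` or `StringTask` built as described in `IPython.kernel.task`); everything else is illustrative.

    from twisted.internet import reactor

    def show_result(task_result):
        # task_result is the TaskResult object produced by the controller.
        print "task finished:", task_result
        reactor.stop()

    d = tc.run(t)                 # Deferred that fires with the integer taskid
    d.addCallback(lambda tid: tc.get_task_result(tid, block=True))
    d.addCallback(show_result)
    d.addErrback(lambda f: (f.printTraceback(), reactor.stop()))
    reactor.run()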
@@ -1,373 +1,372 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Test template for complete engine object"""
3 """Test template for complete engine object"""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import cPickle as pickle
18 import cPickle as pickle
19
19
20 from twisted.internet import defer, reactor
20 from twisted.internet import defer, reactor
21 from twisted.python import failure
21 from twisted.python import failure
22 from twisted.application import service
22 from twisted.application import service
23 import zope.interface as zi
23 import zope.interface as zi
24
24
25 from IPython.kernel import newserialized
25 from IPython.kernel import newserialized
26 from IPython.kernel import error
26 from IPython.kernel import error
27 from IPython.kernel.pickleutil import can, uncan
27 from IPython.kernel.pickleutil import can, uncan
28 import IPython.kernel.engineservice as es
28 import IPython.kernel.engineservice as es
29 from IPython.kernel.core.interpreter import Interpreter
29 from IPython.kernel.core.interpreter import Interpreter
30 from IPython.testing.parametric import Parametric, parametric
30 from IPython.testing.parametric import Parametric, parametric
31
31
32 #-------------------------------------------------------------------------------
32 #-------------------------------------------------------------------------------
33 # Tests
33 # Tests
34 #-------------------------------------------------------------------------------
34 #-------------------------------------------------------------------------------
35
35
36
36
37 # A sequence of valid commands run through execute
37 # A sequence of valid commands run through execute
38 validCommands = ['a=5',
38 validCommands = ['a=5',
39 'b=10',
39 'b=10',
40 'a=5; b=10; c=a+b',
40 'a=5; b=10; c=a+b',
41 'import math; 2.0*math.pi',
41 'import math; 2.0*math.pi',
42 """def f():
42 """def f():
43 result = 0.0
43 result = 0.0
44 for i in range(10):
44 for i in range(10):
45 result += i
45 result += i
46 """,
46 """,
47 'if 1<2: a=5',
47 'if 1<2: a=5',
48 """import time
48 """import time
49 time.sleep(0.1)""",
49 time.sleep(0.1)""",
50 """from math import cos;
50 """from math import cos;
51 x = 1.0*cos(0.5)""", # Semicolons lead to Discard ast nodes that should be discarded
51 x = 1.0*cos(0.5)""", # Semicolons lead to Discard ast nodes that should be discarded
52 """from sets import Set
52 """from sets import Set
53 s = Set()
53 s = Set()
54 """, # Trailing whitespace should be allowed.
54 """, # Trailing whitespace should be allowed.
55 """import math
55 """import math
56 math.cos(1.0)""", # Test a method call with a discarded return value
56 math.cos(1.0)""", # Test a method call with a discarded return value
57 """x=1.0234
57 """x=1.0234
58 a=5; b=10""", # Test an embedded semicolon
58 a=5; b=10""", # Test an embedded semicolon
59 """x=1.0234
59 """x=1.0234
60 a=5; b=10;""" # Test both an embedded and trailing semicolon
60 a=5; b=10;""" # Test both an embedded and trailing semicolon
61 ]
61 ]
62
62
63 # A sequence of commands that raise various exceptions
63 # A sequence of commands that raise various exceptions
64 invalidCommands = [('a=1/0',ZeroDivisionError),
64 invalidCommands = [('a=1/0',ZeroDivisionError),
65 ('print v',NameError),
65 ('print v',NameError),
66 ('l=[];l[0]',IndexError),
66 ('l=[];l[0]',IndexError),
67 ("d={};d['a']",KeyError),
67 ("d={};d['a']",KeyError),
68 ("assert 1==0",AssertionError),
68 ("assert 1==0",AssertionError),
69 ("import abababsdbfsbaljasdlja",ImportError),
69 ("import abababsdbfsbaljasdlja",ImportError),
70 ("raise Exception()",Exception)]
70 ("raise Exception()",Exception)]
71
71
72 def testf(x):
72 def testf(x):
73 return 2.0*x
73 return 2.0*x
74
74
75 globala = 99
75 globala = 99
76
76
77 def testg(x):
77 def testg(x):
78 return globala*x
78 return globala*x
79
79
80 class IEngineCoreTestCase(object):
80 class IEngineCoreTestCase(object):
81 """Test an IEngineCore implementer."""
81 """Test an IEngineCore implementer."""
82
82
83 def createShell(self):
83 def createShell(self):
84 return Interpreter()
84 return Interpreter()
85
85
86 def catchQueueCleared(self, f):
86 def catchQueueCleared(self, f):
87 try:
87 try:
88 f.raiseException()
88 f.raiseException()
89 except error.QueueCleared:
89 except error.QueueCleared:
90 pass
90 pass
91
91
92 def testIEngineCoreInterface(self):
92 def testIEngineCoreInterface(self):
93 """Does self.engine claim to implement IEngineCore?"""
93 """Does self.engine claim to implement IEngineCore?"""
94 self.assert_(es.IEngineCore.providedBy(self.engine))
94 self.assert_(es.IEngineCore.providedBy(self.engine))
95
95
96 def testIEngineCoreInterfaceMethods(self):
96 def testIEngineCoreInterfaceMethods(self):
97 """Does self.engine have the methods and attributes in IEngineCore."""
97 """Does self.engine have the methods and attributes in IEngineCore."""
98 for m in list(es.IEngineCore):
98 for m in list(es.IEngineCore):
99 self.assert_(hasattr(self.engine, m))
99 self.assert_(hasattr(self.engine, m))
100
100
101 def testIEngineCoreDeferreds(self):
101 def testIEngineCoreDeferreds(self):
102 d = self.engine.execute('a=5')
102 d = self.engine.execute('a=5')
103 d.addCallback(lambda _: self.engine.pull('a'))
103 d.addCallback(lambda _: self.engine.pull('a'))
104 d.addCallback(lambda _: self.engine.get_result())
104 d.addCallback(lambda _: self.engine.get_result())
105 d.addCallback(lambda _: self.engine.keys())
105 d.addCallback(lambda _: self.engine.keys())
106 d.addCallback(lambda _: self.engine.push(dict(a=10)))
106 d.addCallback(lambda _: self.engine.push(dict(a=10)))
107 return d
107 return d
108
108
109 def runTestExecute(self, cmd):
109 def runTestExecute(self, cmd):
110 self.shell = Interpreter()
110 self.shell = Interpreter()
111 actual = self.shell.execute(cmd)
111 actual = self.shell.execute(cmd)
112 def compare(computed):
112 def compare(computed):
113 actual['id'] = computed['id']
113 actual['id'] = computed['id']
114 self.assertEquals(actual, computed)
114 self.assertEquals(actual, computed)
115 d = self.engine.execute(cmd)
115 d = self.engine.execute(cmd)
116 d.addCallback(compare)
116 d.addCallback(compare)
117 return d
117 return d
118
118
119 @parametric
119 @parametric
120 def testExecute(cls):
120 def testExecute(cls):
121 return [(cls.runTestExecute, cmd) for cmd in validCommands]
121 return [(cls.runTestExecute, cmd) for cmd in validCommands]
122
122
123 def runTestExecuteFailures(self, cmd, exc):
123 def runTestExecuteFailures(self, cmd, exc):
124 def compare(f):
124 def compare(f):
125 self.assertRaises(exc, f.raiseException)
125 self.assertRaises(exc, f.raiseException)
126 d = self.engine.execute(cmd)
126 d = self.engine.execute(cmd)
127 d.addErrback(compare)
127 d.addErrback(compare)
128 return d
128 return d
129
129
130 @parametric
130 @parametric
131 def testExecuteFailures(cls):
131 def testExecuteFailures(cls):
132 return [(cls.runTestExecuteFailures, cmd, exc) for cmd, exc in invalidCommands]
132 return [(cls.runTestExecuteFailures, cmd, exc) for cmd, exc in invalidCommands]
133
133
134 def runTestPushPull(self, o):
134 def runTestPushPull(self, o):
135 d = self.engine.push(dict(a=o))
135 d = self.engine.push(dict(a=o))
136 d.addCallback(lambda r: self.engine.pull('a'))
136 d.addCallback(lambda r: self.engine.pull('a'))
137 d.addCallback(lambda r: self.assertEquals(o,r))
137 d.addCallback(lambda r: self.assertEquals(o,r))
138 return d
138 return d
139
139
140 @parametric
140 @parametric
141 def testPushPull(cls):
141 def testPushPull(cls):
142 objs = [10,"hi there",1.2342354,{"p":(1,2)},None]
142 objs = [10,"hi there",1.2342354,{"p":(1,2)},None]
143 return [(cls.runTestPushPull, o) for o in objs]
143 return [(cls.runTestPushPull, o) for o in objs]
144
144
145 def testPullNameError(self):
145 def testPullNameError(self):
146 d = self.engine.push(dict(a=5))
146 d = self.engine.push(dict(a=5))
147 d.addCallback(lambda _:self.engine.reset())
147 d.addCallback(lambda _:self.engine.reset())
148 d.addCallback(lambda _: self.engine.pull("a"))
148 d.addCallback(lambda _: self.engine.pull("a"))
149 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
149 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
150 return d
150 return d
151
151
152 def testPushPullFailures(self):
152 def testPushPullFailures(self):
153 d = self.engine.pull('a')
153 d = self.engine.pull('a')
154 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
154 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
155 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
155 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
156 d.addCallback(lambda _: self.engine.pull('l'))
156 d.addCallback(lambda _: self.engine.pull('l'))
157 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
157 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
158 d.addCallback(lambda _: self.engine.push(dict(l=lambda x: x)))
158 d.addCallback(lambda _: self.engine.push(dict(l=lambda x: x)))
159 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
159 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
160 return d
160 return d
161
161
162 def testPushPullArray(self):
162 def testPushPullArray(self):
163 try:
163 try:
164 import numpy
164 import numpy
165 except:
165 except:
166 print 'no numpy, ',
167 return
166 return
168 a = numpy.random.random(1000)
167 a = numpy.random.random(1000)
169 d = self.engine.push(dict(a=a))
168 d = self.engine.push(dict(a=a))
170 d.addCallback(lambda _: self.engine.pull('a'))
169 d.addCallback(lambda _: self.engine.pull('a'))
171 d.addCallback(lambda b: b==a)
170 d.addCallback(lambda b: b==a)
172 d.addCallback(lambda c: c.all())
171 d.addCallback(lambda c: c.all())
173 return self.assertDeferredEquals(d, True)
172 return self.assertDeferredEquals(d, True)
174
173
175 def testPushFunction(self):
174 def testPushFunction(self):
176
175
177 d = self.engine.push_function(dict(f=testf))
176 d = self.engine.push_function(dict(f=testf))
178 d.addCallback(lambda _: self.engine.execute('result = f(10)'))
177 d.addCallback(lambda _: self.engine.execute('result = f(10)'))
179 d.addCallback(lambda _: self.engine.pull('result'))
178 d.addCallback(lambda _: self.engine.pull('result'))
180 d.addCallback(lambda r: self.assertEquals(r, testf(10)))
179 d.addCallback(lambda r: self.assertEquals(r, testf(10)))
181 return d
180 return d
182
181
183 def testPullFunction(self):
182 def testPullFunction(self):
184 d = self.engine.push_function(dict(f=testf, g=testg))
183 d = self.engine.push_function(dict(f=testf, g=testg))
185 d.addCallback(lambda _: self.engine.pull_function(('f','g')))
184 d.addCallback(lambda _: self.engine.pull_function(('f','g')))
186 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
185 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
187 return d
186 return d
188
187
189 def testPushFunctionGlobal(self):
188 def testPushFunctionGlobal(self):
190 """Make sure that pushed functions pick up the user's namespace for globals."""
189 """Make sure that pushed functions pick up the user's namespace for globals."""
191 d = self.engine.push(dict(globala=globala))
190 d = self.engine.push(dict(globala=globala))
192 d.addCallback(lambda _: self.engine.push_function(dict(g=testg)))
191 d.addCallback(lambda _: self.engine.push_function(dict(g=testg)))
193 d.addCallback(lambda _: self.engine.execute('result = g(10)'))
192 d.addCallback(lambda _: self.engine.execute('result = g(10)'))
194 d.addCallback(lambda _: self.engine.pull('result'))
193 d.addCallback(lambda _: self.engine.pull('result'))
195 d.addCallback(lambda r: self.assertEquals(r, testg(10)))
194 d.addCallback(lambda r: self.assertEquals(r, testg(10)))
196 return d
195 return d
197
196
198 def testGetResultFailure(self):
197 def testGetResultFailure(self):
199 d = self.engine.get_result(None)
198 d = self.engine.get_result(None)
200 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
199 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
201 d.addCallback(lambda _: self.engine.get_result(10))
200 d.addCallback(lambda _: self.engine.get_result(10))
202 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
201 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
203 return d
202 return d
204
203
205 def runTestGetResult(self, cmd):
204 def runTestGetResult(self, cmd):
206 self.shell = Interpreter()
205 self.shell = Interpreter()
207 actual = self.shell.execute(cmd)
206 actual = self.shell.execute(cmd)
208 def compare(computed):
207 def compare(computed):
209 actual['id'] = computed['id']
208 actual['id'] = computed['id']
210 self.assertEquals(actual, computed)
209 self.assertEquals(actual, computed)
211 d = self.engine.execute(cmd)
210 d = self.engine.execute(cmd)
212 d.addCallback(lambda r: self.engine.get_result(r['number']))
211 d.addCallback(lambda r: self.engine.get_result(r['number']))
213 d.addCallback(compare)
212 d.addCallback(compare)
214 return d
213 return d
215
214
216 @parametric
215 @parametric
217 def testGetResult(cls):
216 def testGetResult(cls):
218 return [(cls.runTestGetResult, cmd) for cmd in validCommands]
217 return [(cls.runTestGetResult, cmd) for cmd in validCommands]
219
218
220 def testGetResultDefault(self):
219 def testGetResultDefault(self):
221 cmd = 'a=5'
220 cmd = 'a=5'
222 shell = self.createShell()
221 shell = self.createShell()
223 shellResult = shell.execute(cmd)
222 shellResult = shell.execute(cmd)
224 def popit(dikt, key):
223 def popit(dikt, key):
225 dikt.pop(key)
224 dikt.pop(key)
226 return dikt
225 return dikt
227 d = self.engine.execute(cmd)
226 d = self.engine.execute(cmd)
228 d.addCallback(lambda _: self.engine.get_result())
227 d.addCallback(lambda _: self.engine.get_result())
229 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r,'id')))
228 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r,'id')))
230 return d
229 return d
231
230
232 def testKeys(self):
231 def testKeys(self):
233 d = self.engine.keys()
232 d = self.engine.keys()
234 d.addCallback(lambda s: isinstance(s, list))
233 d.addCallback(lambda s: isinstance(s, list))
235 d.addCallback(lambda r: self.assertEquals(r, True))
234 d.addCallback(lambda r: self.assertEquals(r, True))
236 return d
235 return d
237
236
238 Parametric(IEngineCoreTestCase)
237 Parametric(IEngineCoreTestCase)
239
238
240 class IEngineSerializedTestCase(object):
239 class IEngineSerializedTestCase(object):
241 """Test an IEngineCore implementer."""
240 """Test an IEngineCore implementer."""
242
241
243 def testIEngineSerializedInterface(self):
242 def testIEngineSerializedInterface(self):
244 """Does self.engine claim to implement IEngineCore?"""
243 """Does self.engine claim to implement IEngineCore?"""
245 self.assert_(es.IEngineSerialized.providedBy(self.engine))
244 self.assert_(es.IEngineSerialized.providedBy(self.engine))
246
245
247 def testIEngineSerializedInterfaceMethods(self):
246 def testIEngineSerializedInterfaceMethods(self):
248 """Does self.engine have the methods and attributes in IEngireCore."""
247 """Does self.engine have the methods and attributes in IEngireCore."""
249 for m in list(es.IEngineSerialized):
248 for m in list(es.IEngineSerialized):
250 self.assert_(hasattr(self.engine, m))
249 self.assert_(hasattr(self.engine, m))
251
250
252 def testIEngineSerializedDeferreds(self):
251 def testIEngineSerializedDeferreds(self):
253 dList = []
252 dList = []
254 d = self.engine.push_serialized(dict(key=newserialized.serialize(12345)))
253 d = self.engine.push_serialized(dict(key=newserialized.serialize(12345)))
255 self.assert_(isinstance(d, defer.Deferred))
254 self.assert_(isinstance(d, defer.Deferred))
256 dList.append(d)
255 dList.append(d)
257 d = self.engine.pull_serialized('key')
256 d = self.engine.pull_serialized('key')
258 self.assert_(isinstance(d, defer.Deferred))
257 self.assert_(isinstance(d, defer.Deferred))
259 dList.append(d)
258 dList.append(d)
260 D = defer.DeferredList(dList)
259 D = defer.DeferredList(dList)
261 return D
260 return D
262
261
263 def testPushPullSerialized(self):
262 def testPushPullSerialized(self):
264 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
263 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
265 d = defer.succeed(None)
264 d = defer.succeed(None)
266 for o in objs:
265 for o in objs:
267 self.engine.push_serialized(dict(key=newserialized.serialize(o)))
266 self.engine.push_serialized(dict(key=newserialized.serialize(o)))
268 value = self.engine.pull_serialized('key')
267 value = self.engine.pull_serialized('key')
269 value.addCallback(lambda serial: newserialized.IUnSerialized(serial).getObject())
268 value.addCallback(lambda serial: newserialized.IUnSerialized(serial).getObject())
270 d = self.assertDeferredEquals(value,o,d)
269 d = self.assertDeferredEquals(value,o,d)
271 return d
270 return d
272
271
273 def testPullSerializedFailures(self):
272 def testPullSerializedFailures(self):
274 d = self.engine.pull_serialized('a')
273 d = self.engine.pull_serialized('a')
275 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
274 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
276 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
275 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
277 d.addCallback(lambda _: self.engine.pull_serialized('l'))
276 d.addCallback(lambda _: self.engine.pull_serialized('l'))
278 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
277 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
279 return d
278 return d
280
279
281 Parametric(IEngineSerializedTestCase)
280 Parametric(IEngineSerializedTestCase)
282
281
283 class IEngineQueuedTestCase(object):
282 class IEngineQueuedTestCase(object):
284 """Test an IEngineQueued implementer."""
283 """Test an IEngineQueued implementer."""
285
284
286 def testIEngineQueuedInterface(self):
285 def testIEngineQueuedInterface(self):
287 """Does self.engine claim to implement IEngineQueued?"""
286 """Does self.engine claim to implement IEngineQueued?"""
288 self.assert_(es.IEngineQueued.providedBy(self.engine))
287 self.assert_(es.IEngineQueued.providedBy(self.engine))
289
288
290 def testIEngineQueuedInterfaceMethods(self):
289 def testIEngineQueuedInterfaceMethods(self):
291 """Does self.engine have the methods and attributes in IEngireQueued."""
290 """Does self.engine have the methods and attributes in IEngireQueued."""
292 for m in list(es.IEngineQueued):
291 for m in list(es.IEngineQueued):
293 self.assert_(hasattr(self.engine, m))
292 self.assert_(hasattr(self.engine, m))
294
293
295 def testIEngineQueuedDeferreds(self):
294 def testIEngineQueuedDeferreds(self):
296 dList = []
295 dList = []
297 d = self.engine.clear_queue()
296 d = self.engine.clear_queue()
298 self.assert_(isinstance(d, defer.Deferred))
297 self.assert_(isinstance(d, defer.Deferred))
299 dList.append(d)
298 dList.append(d)
300 d = self.engine.queue_status()
299 d = self.engine.queue_status()
301 self.assert_(isinstance(d, defer.Deferred))
300 self.assert_(isinstance(d, defer.Deferred))
302 dList.append(d)
301 dList.append(d)
303 D = defer.DeferredList(dList)
302 D = defer.DeferredList(dList)
304 return D
303 return D
305
304
306 def testClearQueue(self):
305 def testClearQueue(self):
307 result = self.engine.clear_queue()
306 result = self.engine.clear_queue()
308 d1 = self.assertDeferredEquals(result, None)
307 d1 = self.assertDeferredEquals(result, None)
309 d1.addCallback(lambda _: self.engine.queue_status())
308 d1.addCallback(lambda _: self.engine.queue_status())
310 d2 = self.assertDeferredEquals(d1, {'queue':[], 'pending':'None'})
309 d2 = self.assertDeferredEquals(d1, {'queue':[], 'pending':'None'})
311 return d2
310 return d2
312
311
313 def testQueueStatus(self):
312 def testQueueStatus(self):
314 result = self.engine.queue_status()
313 result = self.engine.queue_status()
315 result.addCallback(lambda r: 'queue' in r and 'pending' in r)
314 result.addCallback(lambda r: 'queue' in r and 'pending' in r)
316 d = self.assertDeferredEquals(result, True)
315 d = self.assertDeferredEquals(result, True)
317 return d
316 return d
318
317
319 Parametric(IEngineQueuedTestCase)
318 Parametric(IEngineQueuedTestCase)
320
319
321 class IEnginePropertiesTestCase(object):
320 class IEnginePropertiesTestCase(object):
322 """Test an IEngineProperties implementor."""
321 """Test an IEngineProperties implementor."""
323
322
324 def testIEnginePropertiesInterface(self):
323 def testIEnginePropertiesInterface(self):
325 """Does self.engine claim to implement IEngineProperties?"""
324 """Does self.engine claim to implement IEngineProperties?"""
326 self.assert_(es.IEngineProperties.providedBy(self.engine))
325 self.assert_(es.IEngineProperties.providedBy(self.engine))
327
326
328 def testIEnginePropertiesInterfaceMethods(self):
327 def testIEnginePropertiesInterfaceMethods(self):
329 """Does self.engine have the methods and attributes in IEngireProperties."""
328 """Does self.engine have the methods and attributes in IEngireProperties."""
330 for m in list(es.IEngineProperties):
329 for m in list(es.IEngineProperties):
331 self.assert_(hasattr(self.engine, m))
330 self.assert_(hasattr(self.engine, m))
332
331
333 def testGetSetProperties(self):
332 def testGetSetProperties(self):
334 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
333 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
335 d = self.engine.set_properties(dikt)
334 d = self.engine.set_properties(dikt)
336 d.addCallback(lambda r: self.engine.get_properties())
335 d.addCallback(lambda r: self.engine.get_properties())
337 d = self.assertDeferredEquals(d, dikt)
336 d = self.assertDeferredEquals(d, dikt)
338 d.addCallback(lambda r: self.engine.get_properties(('c',)))
337 d.addCallback(lambda r: self.engine.get_properties(('c',)))
339 d = self.assertDeferredEquals(d, {'c': dikt['c']})
338 d = self.assertDeferredEquals(d, {'c': dikt['c']})
340 d.addCallback(lambda r: self.engine.set_properties(dict(c=False)))
339 d.addCallback(lambda r: self.engine.set_properties(dict(c=False)))
341 d.addCallback(lambda r: self.engine.get_properties(('c', 'd')))
340 d.addCallback(lambda r: self.engine.get_properties(('c', 'd')))
342 d = self.assertDeferredEquals(d, dict(c=False, d=None))
341 d = self.assertDeferredEquals(d, dict(c=False, d=None))
343 return d
342 return d
344
343
345 def testClearProperties(self):
344 def testClearProperties(self):
346 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
345 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
347 d = self.engine.set_properties(dikt)
346 d = self.engine.set_properties(dikt)
348 d.addCallback(lambda r: self.engine.clear_properties())
347 d.addCallback(lambda r: self.engine.clear_properties())
349 d.addCallback(lambda r: self.engine.get_properties())
348 d.addCallback(lambda r: self.engine.get_properties())
350 d = self.assertDeferredEquals(d, {})
349 d = self.assertDeferredEquals(d, {})
351 return d
350 return d
352
351
353 def testDelHasProperties(self):
352 def testDelHasProperties(self):
354 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
353 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
355 d = self.engine.set_properties(dikt)
354 d = self.engine.set_properties(dikt)
356 d.addCallback(lambda r: self.engine.del_properties(('b','e')))
355 d.addCallback(lambda r: self.engine.del_properties(('b','e')))
357 d.addCallback(lambda r: self.engine.has_properties(('a','b','c','d','e')))
356 d.addCallback(lambda r: self.engine.has_properties(('a','b','c','d','e')))
358 d = self.assertDeferredEquals(d, [True, False, True, True, False])
357 d = self.assertDeferredEquals(d, [True, False, True, True, False])
359 return d
358 return d
360
359
361 def testStrictDict(self):
360 def testStrictDict(self):
362 s = """from IPython.kernel.engineservice import get_engine
361 s = """from IPython.kernel.engineservice import get_engine
363 p = get_engine(%s).properties"""%self.engine.id
362 p = get_engine(%s).properties"""%self.engine.id
364 d = self.engine.execute(s)
363 d = self.engine.execute(s)
365 d.addCallback(lambda r: self.engine.execute("p['a'] = lambda _:None"))
364 d.addCallback(lambda r: self.engine.execute("p['a'] = lambda _:None"))
366 d = self.assertDeferredRaises(d, error.InvalidProperty)
365 d = self.assertDeferredRaises(d, error.InvalidProperty)
367 d.addCallback(lambda r: self.engine.execute("p['a'] = range(5)"))
366 d.addCallback(lambda r: self.engine.execute("p['a'] = range(5)"))
368 d.addCallback(lambda r: self.engine.execute("p['a'].append(5)"))
367 d.addCallback(lambda r: self.engine.execute("p['a'].append(5)"))
369 d.addCallback(lambda r: self.engine.get_properties('a'))
368 d.addCallback(lambda r: self.engine.get_properties('a'))
370 d = self.assertDeferredEquals(d, dict(a=range(5)))
369 d = self.assertDeferredEquals(d, dict(a=range(5)))
371 return d
370 return d
372
371
373 Parametric(IEnginePropertiesTestCase)
372 Parametric(IEnginePropertiesTestCase)
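The engine tests above generate many individual cases from a single runner via the `parametric` decorator and the `Parametric` wrapper. A condensed sketch of that pattern follows, using only names visible in these tests; the concrete commands are placeholders rather than the real test data.

    from IPython.testing.parametric import Parametric, parametric

    class ExampleParametricTests(object):
        """Sketch of the case-generation pattern used in the engine tests."""

        def runTestExecute(self, cmd):
            # The real tests compare self.engine.execute(cmd) against a
            # reference Interpreter; this stand-in only checks the input type.
            assert isinstance(cmd, str)

        @parametric
        def testExecute(cls):
            # One (runner, argument) pair per command; the framework expands
            # each pair into a separate test case.
            return [(cls.runTestExecute, cmd) for cmd in ('a=5', 'b=10')]

    Parametric(ExampleParametricTests)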
@@ -1,838 +1,828 b''
1 # encoding: utf-8
2
3 """"""
4
5 __docformat__ = "restructuredtext en"
6
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
13
14 #-------------------------------------------------------------------------------
15 # Imports
16 #-------------------------------------------------------------------------------
17
18 from twisted.internet import defer
19
20 from IPython.kernel import engineservice as es
21 from IPython.kernel import multiengine as me
22 from IPython.kernel import newserialized
23 from IPython.kernel.error import NotDefined
24 from IPython.testing import util
25 from IPython.testing.parametric import parametric, Parametric
26 from IPython.kernel import newserialized
27 from IPython.kernel.util import printer
28 from IPython.kernel.error import (InvalidEngineID,
29 NoEnginesRegistered,
30 CompositeError,
31 InvalidDeferredID)
32 from IPython.kernel.tests.engineservicetest import validCommands, invalidCommands
33 from IPython.kernel.core.interpreter import Interpreter
34
35
36 #-------------------------------------------------------------------------------
37 # Base classes and utilities
38 #-------------------------------------------------------------------------------
39
40 class IMultiEngineBaseTestCase(object):
41 """Basic utilities for working with multiengine tests.
42
43 Some subclass should define:
44
45 * self.multiengine
46 * self.engines to keep track of engines for clean up"""
47
48 def createShell(self):
49 return Interpreter()
50
51 def addEngine(self, n=1):
52 for i in range(n):
53 e = es.EngineService()
54 e.startService()
55 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
56 e.id = regDict['id']
57 self.engines.append(e)
58
59
60 def testf(x):
61 return 2.0*x
62
63
64 globala = 99
65
66
67 def testg(x):
68 return globala*x
69
70
71 def isdid(did):
72 if not isinstance(did, str):
73 return False
74 if not len(did)==40:
75 return False
76 return True
77
78
79 def _raise_it(f):
80 try:
81 f.raiseException()
82 except CompositeError, e:
83 e.raise_exception()
84
85 #-------------------------------------------------------------------------------
86 # IMultiEngineTestCase
87 #-------------------------------------------------------------------------------
88
89 class IMultiEngineTestCase(IMultiEngineBaseTestCase):
90 """A test for any object that implements IEngineMultiplexer.
90 """A test for any object that implements IEngineMultiplexer.
91
91
92 self.multiengine must be defined and implement IEngineMultiplexer.
92 self.multiengine must be defined and implement IEngineMultiplexer.
93 """
93 """
94
94
95 def testIMultiEngineInterface(self):
95 def testIMultiEngineInterface(self):
96 """Does self.engine claim to implement IEngineCore?"""
96 """Does self.engine claim to implement IEngineCore?"""
97 self.assert_(me.IEngineMultiplexer.providedBy(self.multiengine))
97 self.assert_(me.IEngineMultiplexer.providedBy(self.multiengine))
98 self.assert_(me.IMultiEngine.providedBy(self.multiengine))
98 self.assert_(me.IMultiEngine.providedBy(self.multiengine))
99
99
100 def testIEngineMultiplexerInterfaceMethods(self):
100 def testIEngineMultiplexerInterfaceMethods(self):
101 """Does self.engine have the methods and attributes in IEngineCore."""
101 """Does self.engine have the methods and attributes in IEngineCore."""
102 for m in list(me.IEngineMultiplexer):
102 for m in list(me.IEngineMultiplexer):
103 self.assert_(hasattr(self.multiengine, m))
103 self.assert_(hasattr(self.multiengine, m))
104
104
105 def testIEngineMultiplexerDeferreds(self):
105 def testIEngineMultiplexerDeferreds(self):
106 self.addEngine(1)
106 self.addEngine(1)
107 d= self.multiengine.execute('a=5', targets=0)
107 d= self.multiengine.execute('a=5', targets=0)
108 d.addCallback(lambda _: self.multiengine.push(dict(a=5),targets=0))
108 d.addCallback(lambda _: self.multiengine.push(dict(a=5),targets=0))
109 d.addCallback(lambda _: self.multiengine.push(dict(a=5, b='asdf', c=[1,2,3]),targets=0))
109 d.addCallback(lambda _: self.multiengine.push(dict(a=5, b='asdf', c=[1,2,3]),targets=0))
110 d.addCallback(lambda _: self.multiengine.pull(('a','b','c'),targets=0))
110 d.addCallback(lambda _: self.multiengine.pull(('a','b','c'),targets=0))
111 d.addCallback(lambda _: self.multiengine.get_result(targets=0))
111 d.addCallback(lambda _: self.multiengine.get_result(targets=0))
112 d.addCallback(lambda _: self.multiengine.reset(targets=0))
112 d.addCallback(lambda _: self.multiengine.reset(targets=0))
113 d.addCallback(lambda _: self.multiengine.keys(targets=0))
113 d.addCallback(lambda _: self.multiengine.keys(targets=0))
114 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)),targets=0))
114 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)),targets=0))
115 d.addCallback(lambda _: self.multiengine.pull_serialized('a',targets=0))
115 d.addCallback(lambda _: self.multiengine.pull_serialized('a',targets=0))
116 d.addCallback(lambda _: self.multiengine.clear_queue(targets=0))
116 d.addCallback(lambda _: self.multiengine.clear_queue(targets=0))
117 d.addCallback(lambda _: self.multiengine.queue_status(targets=0))
117 d.addCallback(lambda _: self.multiengine.queue_status(targets=0))
118 return d
118 return d
119
119
120 def testInvalidEngineID(self):
120 def testInvalidEngineID(self):
121 self.addEngine(1)
121 self.addEngine(1)
122 badID = 100
122 badID = 100
123 d = self.multiengine.execute('a=5', targets=badID)
123 d = self.multiengine.execute('a=5', targets=badID)
124 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
124 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
125 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
125 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
126 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
126 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
127 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
127 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
128 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
128 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
129 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
129 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
130 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
130 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
131 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
131 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
132 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
132 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
133 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
133 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
134 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
134 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
135 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
135 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
136 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
136 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
137 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
137 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
138 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
138 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
139 return d
139 return d
140
140
141 def testNoEnginesRegistered(self):
141 def testNoEnginesRegistered(self):
142 badID = 'all'
142 badID = 'all'
143 d= self.multiengine.execute('a=5', targets=badID)
143 d= self.multiengine.execute('a=5', targets=badID)
144 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
144 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
145 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
145 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
146 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
146 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
147 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
147 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
148 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
148 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
149 d.addCallback(lambda _: self.multiengine.get_result(targets=badID))
149 d.addCallback(lambda _: self.multiengine.get_result(targets=badID))
150 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
150 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
151 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
151 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
152 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
152 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
153 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
153 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
154 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
154 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
155 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
155 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
156 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
156 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
157 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
157 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
158 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
158 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
159 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
159 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
160 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
160 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
161 return d
161 return d
162
162
163 def runExecuteAll(self, d, cmd, shell):
163 def runExecuteAll(self, d, cmd, shell):
164 actual = shell.execute(cmd)
164 actual = shell.execute(cmd)
165 d.addCallback(lambda _: self.multiengine.execute(cmd))
165 d.addCallback(lambda _: self.multiengine.execute(cmd))
166 def compare(result):
166 def compare(result):
167 for r in result:
167 for r in result:
168 actual['id'] = r['id']
168 actual['id'] = r['id']
169 self.assertEquals(r, actual)
169 self.assertEquals(r, actual)
170 d.addCallback(compare)
170 d.addCallback(compare)
171
171
172 def testExecuteAll(self):
172 def testExecuteAll(self):
173 self.addEngine(4)
173 self.addEngine(4)
174 d= defer.Deferred()
174 d= defer.Deferred()
175 shell = Interpreter()
175 shell = Interpreter()
176 for cmd in validCommands:
176 for cmd in validCommands:
177 self.runExecuteAll(d, cmd, shell)
177 self.runExecuteAll(d, cmd, shell)
178 d.callback(None)
178 d.callback(None)
179 return d
179 return d
180
181 # The following two methods show how to do parametrized
182 # tests. This is really slick! Same is used above.
183 def runExecuteFailures(self, cmd, exc):
184 self.addEngine(4)
185 d= self.multiengine.execute(cmd)
186 d.addErrback(lambda f: self.assertRaises(exc, _raise_it, f))
187 return d
188
189 @parametric
190 def testExecuteFailures(cls):
191 return [(cls.runExecuteFailures,cmd,exc) for
192 cmd,exc in invalidCommands]
193
194 def testPushPull(self):
195 self.addEngine(1)
196 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
197 d= self.multiengine.push(dict(key=objs[0]), targets=0)
198 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
199 d.addCallback(lambda r: self.assertEquals(r, [objs[0]]))
200 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[1]), targets=0))
201 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
202 d.addCallback(lambda r: self.assertEquals(r, [objs[1]]))
203 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[2]), targets=0))
204 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
205 d.addCallback(lambda r: self.assertEquals(r, [objs[2]]))
206 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[3]), targets=0))
207 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
208 d.addCallback(lambda r: self.assertEquals(r, [objs[3]]))
209 d.addCallback(lambda _: self.multiengine.reset(targets=0))
210 d.addCallback(lambda _: self.multiengine.pull('a', targets=0))
211 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
212 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=20)))
213 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
214 d.addCallback(lambda r: self.assertEquals(r, [[10,20]]))
215 return d
216
217 def testPushPullAll(self):
218 self.addEngine(4)
219 d= self.multiengine.push(dict(a=10))
220 d.addCallback(lambda _: self.multiengine.pull('a'))
221 d.addCallback(lambda r: self.assert_(r==[10,10,10,10]))
222 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20)))
223 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
224 d.addCallback(lambda r: self.assert_(r==4*[[10,20]]))
225 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20), targets=0))
226 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
227 d.addCallback(lambda r: self.assert_(r==[[10,20]]))
228 d.addCallback(lambda _: self.multiengine.push(dict(a=None, b=None), targets=0))
229 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
230 d.addCallback(lambda r: self.assert_(r==[[None,None]]))
231 return d
232
233 def testPushPullSerialized(self):
234 self.addEngine(1)
235 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
236 d= self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[0])), targets=0)
237 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
238 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
239 d.addCallback(lambda r: self.assertEquals(r, objs[0]))
240 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[1])), targets=0))
241 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
242 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
243 d.addCallback(lambda r: self.assertEquals(r, objs[1]))
244 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[2])), targets=0))
245 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
246 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
247 d.addCallback(lambda r: self.assertEquals(r, objs[2]))
248 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[3])), targets=0))
249 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
250 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
251 d.addCallback(lambda r: self.assertEquals(r, objs[3]))
252 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=range(5)), targets=0))
253 d.addCallback(lambda _: self.multiengine.pull_serialized(('a','b'), targets=0))
254 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
255 d.addCallback(lambda r: self.assertEquals(r, [10, range(5)]))
256 d.addCallback(lambda _: self.multiengine.reset(targets=0))
257 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
258 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
259 return d
260
261 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
262 d= defer.succeed(None)
263 for o in objs:
264 self.multiengine.push_serialized(0, key=newserialized.serialize(o))
265 value = self.multiengine.pull_serialized(0, 'key')
266 value.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
267 d = self.assertDeferredEquals(value,o,d)
268 return d
269
270 def runGetResultAll(self, d, cmd, shell):
271 actual = shell.execute(cmd)
272 d.addCallback(lambda _: self.multiengine.execute(cmd))
273 d.addCallback(lambda _: self.multiengine.get_result())
274 def compare(result):
275 for r in result:
276 actual['id'] = r['id']
277 self.assertEquals(r, actual)
278 d.addCallback(compare)
279
280 def testGetResultAll(self):
281 self.addEngine(4)
282 d= defer.Deferred()
283 shell = Interpreter()
284 for cmd in validCommands:
285 self.runGetResultAll(d, cmd, shell)
286 d.callback(None)
287 return d
288
289 def testGetResultDefault(self):
290 self.addEngine(1)
291 target = 0
292 cmd = 'a=5'
293 shell = self.createShell()
294 shellResult = shell.execute(cmd)
295 def popit(dikt, key):
296 dikt.pop(key)
297 return dikt
298 d= self.multiengine.execute(cmd, targets=target)
299 d.addCallback(lambda _: self.multiengine.get_result(targets=target))
300 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r[0],'id')))
301 return d
302
303 def testGetResultFailure(self):
304 self.addEngine(1)
305 d= self.multiengine.get_result(None, targets=0)
306 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
307 d.addCallback(lambda _: self.multiengine.get_result(10, targets=0))
308 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
309 return d
310
311 def testPushFunction(self):
312 self.addEngine(1)
313 d= self.multiengine.push_function(dict(f=testf), targets=0)
314 d.addCallback(lambda _: self.multiengine.execute('result = f(10)', targets=0))
315 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
316 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
317 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala), targets=0))
318 d.addCallback(lambda _: self.multiengine.push_function(dict(g=testg), targets=0))
319 d.addCallback(lambda _: self.multiengine.execute('result = g(10)', targets=0))
320 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
321 d.addCallback(lambda r: self.assertEquals(r[0], testg(10)))
322 return d
323
324 def testPullFunction(self):
325 self.addEngine(1)
326 d= self.multiengine.push(dict(a=globala), targets=0)
327 d.addCallback(lambda _: self.multiengine.push_function(dict(f=testf), targets=0))
328 d.addCallback(lambda _: self.multiengine.pull_function('f', targets=0))
329 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
330 d.addCallback(lambda _: self.multiengine.execute("def g(x): return x*x", targets=0))
331 d.addCallback(lambda _: self.multiengine.pull_function(('f','g'),targets=0))
332 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
333 return d
334
335 def testPushFunctionAll(self):
336 self.addEngine(4)
337 d= self.multiengine.push_function(dict(f=testf))
338 d.addCallback(lambda _: self.multiengine.execute('result = f(10)'))
339 d.addCallback(lambda _: self.multiengine.pull('result'))
340 d.addCallback(lambda r: self.assertEquals(r, 4*[testf(10)]))
341 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala)))
342 d.addCallback(lambda _: self.multiengine.push_function(dict(testg=testg)))
343 d.addCallback(lambda _: self.multiengine.execute('result = testg(10)'))
344 d.addCallback(lambda _: self.multiengine.pull('result'))
345 d.addCallback(lambda r: self.assertEquals(r, 4*[testg(10)]))
346 return d
347
348 def testPullFunctionAll(self):
349 self.addEngine(4)
350 d= self.multiengine.push_function(dict(f=testf))
351 d.addCallback(lambda _: self.multiengine.pull_function('f'))
352 d.addCallback(lambda r: self.assertEquals([func(10) for func in r], 4*[testf(10)]))
353 return d
354
355 def testGetIDs(self):
356 self.addEngine(1)
357 d= self.multiengine.get_ids()
358 d.addCallback(lambda r: self.assertEquals(r, [0]))
359 d.addCallback(lambda _: self.addEngine(3))
360 d.addCallback(lambda _: self.multiengine.get_ids())
361 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
362 return d
363
364 def testClearQueue(self):
365 self.addEngine(4)
366 d= self.multiengine.clear_queue()
367 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
368 return d
369
370 def testQueueStatus(self):
371 self.addEngine(4)
372 d= self.multiengine.queue_status(targets=0)
373 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
374 return d
375
376 def testGetSetProperties(self):
377 self.addEngine(4)
378 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
379 d= self.multiengine.set_properties(dikt)
380 d.addCallback(lambda r: self.multiengine.get_properties())
381 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
382 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
383 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
384 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
385 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
386 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
387 return d
388
389 def testClearProperties(self):
390 self.addEngine(4)
391 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
392 d= self.multiengine.set_properties(dikt)
393 d.addCallback(lambda r: self.multiengine.clear_properties())
394 d.addCallback(lambda r: self.multiengine.get_properties())
395 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
396 return d
397
398 def testDelHasProperties(self):
399 self.addEngine(4)
400 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
401 d= self.multiengine.set_properties(dikt)
402 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
403 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
404 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
405 return d
406
407 Parametric(IMultiEngineTestCase)
408
409 #-------------------------------------------------------------------------------
410 # ISynchronousMultiEngineTestCase
411 #-------------------------------------------------------------------------------
412
413 class ISynchronousMultiEngineTestCase(IMultiEngineBaseTestCase):
414
415 def testISynchronousMultiEngineInterface(self):
416 """Does self.engine claim to implement IEngineCore?"""
417 self.assert_(me.ISynchronousEngineMultiplexer.providedBy(self.multiengine))
418 self.assert_(me.ISynchronousMultiEngine.providedBy(self.multiengine))
419
420 def testExecute(self):
421 self.addEngine(4)
422 execute = self.multiengine.execute
423 d= execute('a=5', targets=0, block=True)
424 d.addCallback(lambda r: self.assert_(len(r)==1))
425 d.addCallback(lambda _: execute('b=10'))
426 d.addCallback(lambda r: self.assert_(len(r)==4))
427 d.addCallback(lambda _: execute('c=30', block=False))
428 d.addCallback(lambda did: self.assert_(isdid(did)))
429 d.addCallback(lambda _: execute('d=[0,1,2]', block=False))
430 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
431 d.addCallback(lambda r: self.assert_(len(r)==4))
432 return d
433
434 def testPushPull(self):
435 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
436 self.addEngine(4)
437 push = self.multiengine.push
438 pull = self.multiengine.pull
439 d= push({'data':data}, targets=0)
440 d.addCallback(lambda r: pull('data', targets=0))
441 d.addCallback(lambda r: self.assertEqual(r,[data]))
442 d.addCallback(lambda _: push({'data':data}))
443 d.addCallback(lambda r: pull('data'))
444 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
445 d.addCallback(lambda _: push({'data':data}, block=False))
446 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
447 d.addCallback(lambda _: pull('data', block=False))
448 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
449 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
450 d.addCallback(lambda _: push(dict(a=10,b=20)))
451 d.addCallback(lambda _: pull(('a','b')))
452 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,20]]))
453 return d
454
455 def testPushPullFunction(self):
456 self.addEngine(4)
457 pushf = self.multiengine.push_function
458 pullf = self.multiengine.pull_function
459 push = self.multiengine.push
460 pull = self.multiengine.pull
461 execute = self.multiengine.execute
462 d= pushf({'testf':testf}, targets=0)
463 d.addCallback(lambda r: pullf('testf', targets=0))
464 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
465 d.addCallback(lambda _: execute('r = testf(10)', targets=0))
466 d.addCallback(lambda _: pull('r', targets=0))
467 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
468 d.addCallback(lambda _: pushf({'testf':testf}, block=False))
469 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
470 d.addCallback(lambda _: pullf('testf', block=False))
471 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
472 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
473 d.addCallback(lambda _: execute("def g(x): return x*x", targets=0))
474 d.addCallback(lambda _: pullf(('testf','g'),targets=0))
475 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
476 return d
477
478 def testGetResult(self):
479 shell = Interpreter()
480 result1 = shell.execute('a=10')
481 result1['id'] = 0
482 result2 = shell.execute('b=20')
483 result2['id'] = 0
484 execute= self.multiengine.execute
485 get_result = self.multiengine.get_result
486 self.addEngine(1)
487 d= execute('a=10')
488 d.addCallback(lambda _: get_result())
489 d.addCallback(lambda r: self.assertEquals(r[0], result1))
490 d.addCallback(lambda _: execute('b=20'))
491 d.addCallback(lambda _: get_result(1))
492 d.addCallback(lambda r: self.assertEquals(r[0], result1))
493 d.addCallback(lambda _: get_result(2, block=False))
494 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
495 d.addCallback(lambda r: self.assertEquals(r[0], result2))
496 return d
497
498 def testResetAndKeys(self):
499 self.addEngine(1)
500
501 #Blocking mode
502 d= self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0)
503 d.addCallback(lambda _: self.multiengine.keys(targets=0))
504 def keys_found(keys):
505 self.assert_('a' in keys[0])
506 self.assert_('b' in keys[0])
507 self.assert_('b' in keys[0])
508 d.addCallback(keys_found)
509 d.addCallback(lambda _: self.multiengine.reset(targets=0))
510 d.addCallback(lambda _: self.multiengine.keys(targets=0))
511 def keys_not_found(keys):
512 self.assert_('a' not in keys[0])
513 self.assert_('b' not in keys[0])
514 self.assert_('b' not in keys[0])
515 d.addCallback(keys_not_found)
516
517 #Non-blocking mode
518 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0))
519 d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
520 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
521 def keys_found(keys):
522 self.assert_('a' in keys[0])
523 self.assert_('b' in keys[0])
524 self.assert_('b' in keys[0])
525 d.addCallback(keys_found)
526 d.addCallback(lambda _: self.multiengine.reset(targets=0, block=False))
527 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
528 d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
529 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
530 def keys_not_found(keys):
531 self.assert_('a' not in keys[0])
532 self.assert_('b' not in keys[0])
533 self.assert_('b' not in keys[0])
534 d.addCallback(keys_not_found)
535
536 return d
537
538 def testPushPullSerialized(self):
539 self.addEngine(1)
540 dikt = dict(a=10,b='hi there',c=1.2345,d={'p':(1,2)})
541 sdikt = {}
542 for k,v in dikt.iteritems():
543 sdikt[k] = newserialized.serialize(v)
544 d= self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0)
545 d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
546 d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
547 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
548 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
549 d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
550 d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0))
551 d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0))
552 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
553 d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
554 d.addCallback(lambda _: self.multiengine.reset(targets=0))
555 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
556 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
557
558 #Non-blocking mode
559 d.addCallback(lambda r: self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0, block=False))
560 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
561 d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
562 d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
563 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
564 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
565 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
566 d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
567 d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0, block=False))
568 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
569 d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0, block=False))
570 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
571 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
572 d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
573 d.addCallback(lambda _: self.multiengine.reset(targets=0))
574 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
575 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
576 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
577 return d
578
579 def testClearQueue(self):
580 self.addEngine(4)
581 d= self.multiengine.clear_queue()
582 d.addCallback(lambda r: self.multiengine.clear_queue(block=False))
583 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
584 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
585 return d
586
587 def testQueueStatus(self):
588 self.addEngine(4)
589 d= self.multiengine.queue_status(targets=0)
590 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
591 d.addCallback(lambda r: self.multiengine.queue_status(targets=0, block=False))
592 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
593 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
594 return d
595
596 def testGetIDs(self):
597 self.addEngine(1)
598 d= self.multiengine.get_ids()
599 d.addCallback(lambda r: self.assertEquals(r, [0]))
600 d.addCallback(lambda _: self.addEngine(3))
601 d.addCallback(lambda _: self.multiengine.get_ids())
602 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
603 return d
604
605 def testGetSetProperties(self):
606 self.addEngine(4)
607 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
608 d= self.multiengine.set_properties(dikt)
609 d.addCallback(lambda r: self.multiengine.get_properties())
610 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
611 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
612 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
613 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
614 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
615 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
616
617 #Non-blocking
618 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
619 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
620 d.addCallback(lambda r: self.multiengine.get_properties(block=False))
621 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
622 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
623 d.addCallback(lambda r: self.multiengine.get_properties(('c',), block=False))
624 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
625 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
626 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False), block=False))
627 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
628 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd'), block=False))
629 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
630 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
631 return d
632
633 def testClearProperties(self):
634 self.addEngine(4)
635 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
636 d= self.multiengine.set_properties(dikt)
637 d.addCallback(lambda r: self.multiengine.clear_properties())
638 d.addCallback(lambda r: self.multiengine.get_properties())
639 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
640
641 #Non-blocking
642 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
643 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
644 d.addCallback(lambda r: self.multiengine.clear_properties(block=False))
645 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
646 d.addCallback(lambda r: self.multiengine.get_properties(block=False))
647 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
648 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
649 return d
650
651 def testDelHasProperties(self):
652 self.addEngine(4)
653 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
654 d= self.multiengine.set_properties(dikt)
655 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
656 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
657 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
658
659 #Non-blocking
660 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
661 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
662 d.addCallback(lambda r: self.multiengine.del_properties(('b','e'), block=False))
663 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
664 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e'), block=False))
665 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
666 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
666 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
667 return d
667 return d
668
668
669 def test_clear_pending_deferreds(self):
669 def test_clear_pending_deferreds(self):
670 self.addEngine(4)
670 self.addEngine(4)
671 did_list = []
671 did_list = []
672 d= self.multiengine.execute('a=10',block=False)
672 d= self.multiengine.execute('a=10',block=False)
673 d.addCallback(lambda did: did_list.append(did))
673 d.addCallback(lambda did: did_list.append(did))
674 d.addCallback(lambda _: self.multiengine.push(dict(b=10),block=False))
674 d.addCallback(lambda _: self.multiengine.push(dict(b=10),block=False))
675 d.addCallback(lambda did: did_list.append(did))
675 d.addCallback(lambda did: did_list.append(did))
676 d.addCallback(lambda _: self.multiengine.pull(('a','b'),block=False))
676 d.addCallback(lambda _: self.multiengine.pull(('a','b'),block=False))
677 d.addCallback(lambda did: did_list.append(did))
677 d.addCallback(lambda did: did_list.append(did))
678 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
678 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
679 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
679 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
680 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
680 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
681 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
681 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
682 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
682 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
683 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
683 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
684 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
684 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
685 return d
685 return d
686
686
687 #-------------------------------------------------------------------------------
687 #-------------------------------------------------------------------------------
688 # Coordinator test cases
688 # Coordinator test cases
689 #-------------------------------------------------------------------------------
689 #-------------------------------------------------------------------------------
690
690
691 class IMultiEngineCoordinatorTestCase(object):
691 class IMultiEngineCoordinatorTestCase(object):
692
692
693 def testScatterGather(self):
693 def testScatterGather(self):
694 self.addEngine(4)
694 self.addEngine(4)
695 d= self.multiengine.scatter('a', range(16))
695 d= self.multiengine.scatter('a', range(16))
696 d.addCallback(lambda r: self.multiengine.gather('a'))
696 d.addCallback(lambda r: self.multiengine.gather('a'))
697 d.addCallback(lambda r: self.assertEquals(r, range(16)))
697 d.addCallback(lambda r: self.assertEquals(r, range(16)))
698 d.addCallback(lambda _: self.multiengine.gather('asdf'))
698 d.addCallback(lambda _: self.multiengine.gather('asdf'))
699 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
699 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
700 return d
700 return d
701
701
702 def testScatterGatherNumpy(self):
702 def testScatterGatherNumpy(self):
703 try:
703 try:
704 import numpy
704 import numpy
705 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
705 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
706 except:
706 except:
707 return
707 return
708 else:
708 else:
709 self.addEngine(4)
709 self.addEngine(4)
710 a = numpy.arange(16)
710 a = numpy.arange(16)
711 d = self.multiengine.scatter('a', a)
711 d = self.multiengine.scatter('a', a)
712 d.addCallback(lambda r: self.multiengine.gather('a'))
712 d.addCallback(lambda r: self.multiengine.gather('a'))
713 d.addCallback(lambda r: assert_array_equal(r, a))
713 d.addCallback(lambda r: assert_array_equal(r, a))
714 return d
714 return d
715
715
716 def testMap(self):
716 def testMap(self):
717 self.addEngine(4)
717 self.addEngine(4)
718 def f(x):
718 def f(x):
719 return x**2
719 return x**2
720 data = range(16)
720 data = range(16)
721 d= self.multiengine.map(f, data)
721 d= self.multiengine.map(f, data)
722 d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
722 d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
723 return d
723 return d
724
724
725
725
726 class ISynchronousMultiEngineCoordinatorTestCase(IMultiEngineCoordinatorTestCase):
726 class ISynchronousMultiEngineCoordinatorTestCase(IMultiEngineCoordinatorTestCase):
727
727
728 def testScatterGatherNonblocking(self):
728 def testScatterGatherNonblocking(self):
729 self.addEngine(4)
729 self.addEngine(4)
730 d= self.multiengine.scatter('a', range(16), block=False)
730 d= self.multiengine.scatter('a', range(16), block=False)
731 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
731 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
732 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
732 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
733 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
733 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
734 d.addCallback(lambda r: self.assertEquals(r, range(16)))
734 d.addCallback(lambda r: self.assertEquals(r, range(16)))
735 return d
735 return d
736
736
737 def testScatterGatherNumpyNonblocking(self):
737 def testScatterGatherNumpyNonblocking(self):
738 try:
738 try:
739 import numpy
739 import numpy
740 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
740 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
741 except:
741 except:
742 return
742 return
743 else:
743 else:
744 self.addEngine(4)
744 self.addEngine(4)
745 a = numpy.arange(16)
745 a = numpy.arange(16)
746 d = self.multiengine.scatter('a', a, block=False)
746 d = self.multiengine.scatter('a', a, block=False)
747 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
747 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
748 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
748 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
749 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
749 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
750 d.addCallback(lambda r: assert_array_equal(r, a))
750 d.addCallback(lambda r: assert_array_equal(r, a))
751 return d
751 return d
752
752
753 def testMapNonblocking(self):
754 self.addEngine(4)
755 def f(x):
756 return x**2
757 data = range(16)
758 d= self.multiengine.map(f, data, block=False)
759 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
760 d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
761 return d
762
763 def test_clear_pending_deferreds(self):
753 def test_clear_pending_deferreds(self):
764 self.addEngine(4)
754 self.addEngine(4)
765 did_list = []
755 did_list = []
766 d= self.multiengine.scatter('a',range(16),block=False)
756 d= self.multiengine.scatter('a',range(16),block=False)
767 d.addCallback(lambda did: did_list.append(did))
757 d.addCallback(lambda did: did_list.append(did))
768 d.addCallback(lambda _: self.multiengine.gather('a',block=False))
758 d.addCallback(lambda _: self.multiengine.gather('a',block=False))
769 d.addCallback(lambda did: did_list.append(did))
759 d.addCallback(lambda did: did_list.append(did))
770 d.addCallback(lambda _: self.multiengine.map(lambda x: x, range(16),block=False))
760 d.addCallback(lambda _: self.multiengine.map(lambda x: x, range(16),block=False))
771 d.addCallback(lambda did: did_list.append(did))
761 d.addCallback(lambda did: did_list.append(did))
772 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
762 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
773 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
763 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
774 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
764 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
775 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
765 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
776 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
766 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
777 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
767 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
778 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
768 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
779 return d
769 return d
780
770
781 #-------------------------------------------------------------------------------
771 #-------------------------------------------------------------------------------
782 # Extras test cases
772 # Extras test cases
783 #-------------------------------------------------------------------------------
773 #-------------------------------------------------------------------------------
784
774
785 class IMultiEngineExtrasTestCase(object):
775 class IMultiEngineExtrasTestCase(object):
786
776
787 def testZipPull(self):
777 def testZipPull(self):
788 self.addEngine(4)
778 self.addEngine(4)
789 d= self.multiengine.push(dict(a=10,b=20))
779 d= self.multiengine.push(dict(a=10,b=20))
790 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b')))
780 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b')))
791 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
781 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
792 return d
782 return d
793
783
794 def testRun(self):
784 def testRun(self):
795 self.addEngine(4)
785 self.addEngine(4)
796 import tempfile
786 import tempfile
797 fname = tempfile.mktemp('foo.py')
787 fname = tempfile.mktemp('foo.py')
798 f= open(fname, 'w')
788 f= open(fname, 'w')
799 f.write('a = 10\nb=30')
789 f.write('a = 10\nb=30')
800 f.close()
790 f.close()
801 d= self.multiengine.run(fname)
791 d= self.multiengine.run(fname)
802 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
792 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
803 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
793 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
804 return d
794 return d
805
795
806
796
807 class ISynchronousMultiEngineExtrasTestCase(IMultiEngineExtrasTestCase):
797 class ISynchronousMultiEngineExtrasTestCase(IMultiEngineExtrasTestCase):
808
798
809 def testZipPullNonblocking(self):
799 def testZipPullNonblocking(self):
810 self.addEngine(4)
800 self.addEngine(4)
811 d= self.multiengine.push(dict(a=10,b=20))
801 d= self.multiengine.push(dict(a=10,b=20))
812 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b'), block=False))
802 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b'), block=False))
813 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
803 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
814 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
804 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
815 return d
805 return d
816
806
817 def testRunNonblocking(self):
807 def testRunNonblocking(self):
818 self.addEngine(4)
808 self.addEngine(4)
819 import tempfile
809 import tempfile
820 fname = tempfile.mktemp('foo.py')
810 fname = tempfile.mktemp('foo.py')
821 f= open(fname, 'w')
811 f= open(fname, 'w')
822 f.write('a = 10\nb=30')
812 f.write('a = 10\nb=30')
823 f.close()
813 f.close()
824 d= self.multiengine.run(fname, block=False)
814 d= self.multiengine.run(fname, block=False)
825 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
815 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
826 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
816 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
827 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
817 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
828 return d
818 return d
829
819
830
820
831 #-------------------------------------------------------------------------------
821 #-------------------------------------------------------------------------------
832 # IFullSynchronousMultiEngineTestCase
822 # IFullSynchronousMultiEngineTestCase
833 #-------------------------------------------------------------------------------
823 #-------------------------------------------------------------------------------
834
824
835 class IFullSynchronousMultiEngineTestCase(ISynchronousMultiEngineTestCase,
825 class IFullSynchronousMultiEngineTestCase(ISynchronousMultiEngineTestCase,
836 ISynchronousMultiEngineCoordinatorTestCase,
826 ISynchronousMultiEngineCoordinatorTestCase,
837 ISynchronousMultiEngineExtrasTestCase):
827 ISynchronousMultiEngineExtrasTestCase):
838 pass
828 pass
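
For readers unfamiliar with what testScatterGather above is asserting, here is a minimal, standalone sketch of block-style scatter/gather: scatter('a', range(16)) splits the sequence into contiguous blocks, one per engine, and gather('a') concatenates the per-engine pieces back in engine order, so the round trip reproduces range(16). The helper names below are illustrative only, not the real controller code.

def scatter_blocks(seq, n_engines):
    """Split seq into n_engines contiguous blocks (earlier blocks get the remainder)."""
    q, r = divmod(len(seq), n_engines)
    blocks, start = [], 0
    for i in range(n_engines):
        size = q + (1 if i < r else 0)
        blocks.append(seq[start:start + size])
        start += size
    return blocks

def gather_blocks(blocks):
    """Concatenate the per-engine blocks back into a single list."""
    out = []
    for b in blocks:
        out.extend(b)
    return out

# scatter_blocks(range(16), 4) -> four blocks [0..3], [4..7], [8..11], [12..15]
# gather_blocks(scatter_blocks(range(16), 4)) -> the original 0..15 sequence,
# which is exactly the equality testScatterGather checks.
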
@@ -1,158 +1,187 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 __docformat__ = "restructuredtext en"
4 __docformat__ = "restructuredtext en"
5
5
6 #-------------------------------------------------------------------------------
6 #-------------------------------------------------------------------------------
7 # Copyright (C) 2008 The IPython Development Team
7 # Copyright (C) 2008 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-------------------------------------------------------------------------------
11 #-------------------------------------------------------------------------------
12
12
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16
16
17 import time
17 import time
18
18
19 from IPython.kernel import task, engineservice as es
19 from IPython.kernel import task, engineservice as es
20 from IPython.kernel.util import printer
20 from IPython.kernel.util import printer
21 from IPython.kernel import error
21 from IPython.kernel import error
22
22
23 #-------------------------------------------------------------------------------
23 #-------------------------------------------------------------------------------
24 # Tests
24 # Tests
25 #-------------------------------------------------------------------------------
25 #-------------------------------------------------------------------------------
26
26
27 def _raise_it(f):
27 def _raise_it(f):
28 try:
28 try:
29 f.raiseException()
29 f.raiseException()
30 except CompositeError, e:
30 except CompositeError, e:
31 e.raise_exception()
31 e.raise_exception()
32
32
33 class TaskTestBase(object):
33 class TaskTestBase(object):
34
34
35 def addEngine(self, n=1):
35 def addEngine(self, n=1):
36 for i in range(n):
36 for i in range(n):
37 e = es.EngineService()
37 e = es.EngineService()
38 e.startService()
38 e.startService()
39 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
39 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
40 e.id = regDict['id']
40 e.id = regDict['id']
41 self.engines.append(e)
41 self.engines.append(e)
42
42
43
43
44 class ITaskControllerTestCase(TaskTestBase):
44 class ITaskControllerTestCase(TaskTestBase):
45
45
46 def testTaskIDs(self):
46 def test_task_ids(self):
47 self.addEngine(1)
47 self.addEngine(1)
48 d = self.tc.run(task.Task('a=5'))
48 d = self.tc.run(task.StringTask('a=5'))
49 d.addCallback(lambda r: self.assertEquals(r, 0))
49 d.addCallback(lambda r: self.assertEquals(r, 0))
50 d.addCallback(lambda r: self.tc.run(task.Task('a=5')))
50 d.addCallback(lambda r: self.tc.run(task.StringTask('a=5')))
51 d.addCallback(lambda r: self.assertEquals(r, 1))
51 d.addCallback(lambda r: self.assertEquals(r, 1))
52 d.addCallback(lambda r: self.tc.run(task.Task('a=5')))
52 d.addCallback(lambda r: self.tc.run(task.StringTask('a=5')))
53 d.addCallback(lambda r: self.assertEquals(r, 2))
53 d.addCallback(lambda r: self.assertEquals(r, 2))
54 d.addCallback(lambda r: self.tc.run(task.Task('a=5')))
54 d.addCallback(lambda r: self.tc.run(task.StringTask('a=5')))
55 d.addCallback(lambda r: self.assertEquals(r, 3))
55 d.addCallback(lambda r: self.assertEquals(r, 3))
56 return d
56 return d
57
57
58 def testAbort(self):
58 def test_abort(self):
59 """Cannot do a proper abort test, because blocking execution prevents
59 """Cannot do a proper abort test, because blocking execution prevents
60 abort from being called before task completes"""
60 abort from being called before task completes"""
61 self.addEngine(1)
61 self.addEngine(1)
62 t = task.Task('a=5')
62 t = task.StringTask('a=5')
63 d = self.tc.abort(0)
63 d = self.tc.abort(0)
64 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
64 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
65 d.addCallback(lambda _:self.tc.run(t))
65 d.addCallback(lambda _:self.tc.run(t))
66 d.addCallback(self.tc.abort)
66 d.addCallback(self.tc.abort)
67 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
67 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
68 return d
68 return d
69
69
70 def testAbortType(self):
70 def test_abort_type(self):
71 self.addEngine(1)
71 self.addEngine(1)
72 d = self.tc.abort('asdfadsf')
72 d = self.tc.abort('asdfadsf')
73 d.addErrback(lambda f: self.assertRaises(TypeError, f.raiseException))
73 d.addErrback(lambda f: self.assertRaises(TypeError, f.raiseException))
74 return d
74 return d
75
75
76 def testClears(self):
76 def test_clear_before_and_after(self):
77 self.addEngine(1)
77 self.addEngine(1)
78 t = task.Task('a=1', clear_before=True, pull='b', clear_after=True)
78 t = task.StringTask('a=1', clear_before=True, pull='b', clear_after=True)
79 d = self.multiengine.execute('b=1', targets=0)
79 d = self.multiengine.execute('b=1', targets=0)
80 d.addCallback(lambda _: self.tc.run(t))
80 d.addCallback(lambda _: self.tc.run(t))
81 d.addCallback(lambda tid: self.tc.get_task_result(tid,block=True))
81 d.addCallback(lambda tid: self.tc.get_task_result(tid,block=True))
82 d.addCallback(lambda tr: tr.failure)
82 d.addCallback(lambda tr: tr.failure)
83 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
83 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
84 d.addCallback(lambda _:self.multiengine.pull('a', targets=0))
84 d.addCallback(lambda _:self.multiengine.pull('a', targets=0))
85 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
85 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
86 return d
86 return d
87
87
88 def testSimpleRetries(self):
88 def test_simple_retries(self):
89 self.addEngine(1)
89 self.addEngine(1)
90 t = task.Task("i += 1\nassert i == 16", pull='i',retries=10)
90 t = task.StringTask("i += 1\nassert i == 16", pull='i',retries=10)
91 t2 = task.Task("i += 1\nassert i == 16", pull='i',retries=10)
91 t2 = task.StringTask("i += 1\nassert i == 16", pull='i',retries=10)
92 d = self.multiengine.execute('i=0', targets=0)
92 d = self.multiengine.execute('i=0', targets=0)
93 d.addCallback(lambda r: self.tc.run(t))
93 d.addCallback(lambda r: self.tc.run(t))
94 d.addCallback(self.tc.get_task_result, block=True)
94 d.addCallback(self.tc.get_task_result, block=True)
95 d.addCallback(lambda tr: tr.ns.i)
95 d.addCallback(lambda tr: tr.ns.i)
96 d.addErrback(lambda f: self.assertRaises(AssertionError, f.raiseException))
96 d.addErrback(lambda f: self.assertRaises(AssertionError, f.raiseException))
97
97
98 d.addCallback(lambda r: self.tc.run(t2))
98 d.addCallback(lambda r: self.tc.run(t2))
99 d.addCallback(self.tc.get_task_result, block=True)
99 d.addCallback(self.tc.get_task_result, block=True)
100 d.addCallback(lambda tr: tr.ns.i)
100 d.addCallback(lambda tr: tr.ns.i)
101 d.addCallback(lambda r: self.assertEquals(r, 16))
101 d.addCallback(lambda r: self.assertEquals(r, 16))
102 return d
102 return d
103
103
104 def testRecoveryTasks(self):
104 def test_recovery_tasks(self):
105 self.addEngine(1)
105 self.addEngine(1)
106 t = task.Task("i=16", pull='i')
106 t = task.StringTask("i=16", pull='i')
107 t2 = task.Task("raise Exception", recovery_task=t, retries = 2)
107 t2 = task.StringTask("raise Exception", recovery_task=t, retries = 2)
108
108
109 d = self.tc.run(t2)
109 d = self.tc.run(t2)
110 d.addCallback(self.tc.get_task_result, block=True)
110 d.addCallback(self.tc.get_task_result, block=True)
111 d.addCallback(lambda tr: tr.ns.i)
111 d.addCallback(lambda tr: tr.ns.i)
112 d.addCallback(lambda r: self.assertEquals(r, 16))
112 d.addCallback(lambda r: self.assertEquals(r, 16))
113 return d
113 return d
114
114
115 # def testInfiniteRecoveryLoop(self):
115 def test_setup_ns(self):
116 # self.addEngine(1)
117 # t = task.Task("raise Exception", retries = 5)
118 # t2 = task.Task("assert True", retries = 2, recovery_task = t)
119 # t.recovery_task = t2
120 #
121 # d = self.tc.run(t)
122 # d.addCallback(self.tc.get_task_result, block=True)
123 # d.addCallback(lambda tr: tr.ns.i)
124 # d.addBoth(printer)
125 # d.addErrback(lambda f: self.assertRaises(AssertionError, f.raiseException))
126 # return d
127 #
128 def testSetupNS(self):
129 self.addEngine(1)
116 self.addEngine(1)
130 d = self.multiengine.execute('a=0', targets=0)
117 d = self.multiengine.execute('a=0', targets=0)
131 ns = dict(a=1, b=0)
118 ns = dict(a=1, b=0)
132 t = task.Task("", push=ns, pull=['a','b'])
119 t = task.StringTask("", push=ns, pull=['a','b'])
133 d.addCallback(lambda r: self.tc.run(t))
120 d.addCallback(lambda r: self.tc.run(t))
134 d.addCallback(self.tc.get_task_result, block=True)
121 d.addCallback(self.tc.get_task_result, block=True)
135 d.addCallback(lambda tr: {'a':tr.ns.a, 'b':tr['b']})
122 d.addCallback(lambda tr: {'a':tr.ns.a, 'b':tr['b']})
136 d.addCallback(lambda r: self.assertEquals(r, ns))
123 d.addCallback(lambda r: self.assertEquals(r, ns))
137 return d
124 return d
138
125
139 def testTaskResults(self):
126 def test_string_task_results(self):
140 self.addEngine(1)
127 self.addEngine(1)
141 t1 = task.Task('a=5', pull='a')
128 t1 = task.StringTask('a=5', pull='a')
142 d = self.tc.run(t1)
129 d = self.tc.run(t1)
143 d.addCallback(self.tc.get_task_result, block=True)
130 d.addCallback(self.tc.get_task_result, block=True)
144 d.addCallback(lambda tr: (tr.ns.a,tr['a'],tr.failure, tr.raiseException()))
131 d.addCallback(lambda tr: (tr.ns.a,tr['a'],tr.failure, tr.raise_exception()))
145 d.addCallback(lambda r: self.assertEquals(r, (5,5,None,None)))
132 d.addCallback(lambda r: self.assertEquals(r, (5,5,None,None)))
146
133
147 t2 = task.Task('7=5')
134 t2 = task.StringTask('7=5')
148 d.addCallback(lambda r: self.tc.run(t2))
135 d.addCallback(lambda r: self.tc.run(t2))
149 d.addCallback(self.tc.get_task_result, block=True)
136 d.addCallback(self.tc.get_task_result, block=True)
150 d.addCallback(lambda tr: tr.ns)
137 d.addCallback(lambda tr: tr.ns)
151 d.addErrback(lambda f: self.assertRaises(SyntaxError, f.raiseException))
138 d.addErrback(lambda f: self.assertRaises(SyntaxError, f.raiseException))
152
139
153 t3 = task.Task('', pull='b')
140 t3 = task.StringTask('', pull='b')
154 d.addCallback(lambda r: self.tc.run(t3))
141 d.addCallback(lambda r: self.tc.run(t3))
155 d.addCallback(self.tc.get_task_result, block=True)
142 d.addCallback(self.tc.get_task_result, block=True)
156 d.addCallback(lambda tr: tr.ns)
143 d.addCallback(lambda tr: tr.ns)
157 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
144 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
158 return d
145 return d
146
147 def test_map_task(self):
148 self.addEngine(1)
149 t1 = task.MapTask(lambda x: 2*x,(10,))
150 d = self.tc.run(t1)
151 d.addCallback(self.tc.get_task_result, block=True)
152 d.addCallback(lambda r: self.assertEquals(r,20))
153
154 t2 = task.MapTask(lambda : 20)
155 d.addCallback(lambda _: self.tc.run(t2))
156 d.addCallback(self.tc.get_task_result, block=True)
157 d.addCallback(lambda r: self.assertEquals(r,20))
158
159 t3 = task.MapTask(lambda x: x,(),{'x':20})
160 d.addCallback(lambda _: self.tc.run(t3))
161 d.addCallback(self.tc.get_task_result, block=True)
162 d.addCallback(lambda r: self.assertEquals(r,20))
163 return d
164
165 def test_map_task_failure(self):
166 self.addEngine(1)
167 t1 = task.MapTask(lambda x: 1/0,(10,))
168 d = self.tc.run(t1)
169 d.addCallback(self.tc.get_task_result, block=True)
170 d.addErrback(lambda f: self.assertRaises(ZeroDivisionError, f.raiseException))
171 return d
172
173 def test_map_task_args(self):
174 self.assertRaises(TypeError, task.MapTask, 'asdfasdf')
175 self.assertRaises(TypeError, task.MapTask, lambda x: x, 10)
176 self.assertRaises(TypeError, task.MapTask, lambda x: x, (10,),30)
177
178 def test_clear(self):
179 self.addEngine(1)
180 t1 = task.MapTask(lambda x: 2*x,(10,))
181 d = self.tc.run(t1)
182 d.addCallback(lambda _: self.tc.get_task_result(0, block=True))
183 d.addCallback(lambda r: self.assertEquals(r,20))
184 d.addCallback(lambda _: self.tc.clear())
185 d.addCallback(lambda _: self.tc.get_task_result(0, block=True))
186 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
187 return d
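
The test_map_task and test_map_task_args cases above exercise the new task.MapTask (function, args, kwargs) interface. As a rough sketch of the behaviour being tested, and not the actual IPython.kernel.task.MapTask implementation (which also handles retries, dependencies, and result shipping), a MapTask-style object stores a callable with its arguments, validates their types at construction, and applies the callable when run on an engine:

class SimpleMapTask(object):
    """Toy stand-in: hold a callable plus its arguments, run it on demand."""

    def __init__(self, function, args=None, kwargs=None):
        if not callable(function):
            raise TypeError('function must be callable')
        args = () if args is None else args
        kwargs = {} if kwargs is None else kwargs
        if not isinstance(args, (tuple, list)):
            raise TypeError('args must be a tuple or list')
        if not isinstance(kwargs, dict):
            raise TypeError('kwargs must be a dict')
        self.function = function
        self.args = tuple(args)
        self.kwargs = dict(kwargs)

    def run(self):
        # The task controller arranges for something like this to happen on an engine.
        return self.function(*self.args, **self.kwargs)

# SimpleMapTask(lambda x: 2*x, (10,)).run()        -> 20
# SimpleMapTask(lambda x: x, (), {'x': 20}).run()  -> 20
# SimpleMapTask('asdfasdf') raises TypeError, matching test_map_task_args.
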
@@ -1,92 +1,92 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """This file contains unittests for the enginepb.py module."""
3 """This file contains unittests for the enginepb.py module."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 try:
18 try:
19 from twisted.python import components
19 from twisted.python import components
20 from twisted.internet import reactor, defer
20 from twisted.internet import reactor, defer
21 from twisted.spread import pb
21 from twisted.spread import pb
22 from twisted.internet.base import DelayedCall
22 from twisted.internet.base import DelayedCall
23 DelayedCall.debug = True
23 DelayedCall.debug = True
24
24
25 import zope.interface as zi
25 import zope.interface as zi
26
26
27 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
27 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
28 from IPython.kernel import engineservice as es
28 from IPython.kernel import engineservice as es
29 from IPython.testing.util import DeferredTestCase
29 from IPython.testing.util import DeferredTestCase
30 from IPython.kernel.controllerservice import IControllerBase
30 from IPython.kernel.controllerservice import IControllerBase
31 from IPython.kernel.enginefc import FCRemoteEngineRefFromService, IEngineBase
31 from IPython.kernel.enginefc import FCRemoteEngineRefFromService, IEngineBase
32 from IPython.kernel.engineservice import IEngineQueued
32 from IPython.kernel.engineservice import IEngineQueued
33 from IPython.kernel.engineconnector import EngineConnector
33 from IPython.kernel.engineconnector import EngineConnector
34
34
35 from IPython.kernel.tests.engineservicetest import \
35 from IPython.kernel.tests.engineservicetest import \
36 IEngineCoreTestCase, \
36 IEngineCoreTestCase, \
37 IEngineSerializedTestCase, \
37 IEngineSerializedTestCase, \
38 IEngineQueuedTestCase
38 IEngineQueuedTestCase
39 except ImportError:
39 except ImportError:
40 print "we got an error!!!"
40 print "we got an error!!!"
41 pass
41 raise
42 else:
42 else:
43 class EngineFCTest(DeferredTestCase,
43 class EngineFCTest(DeferredTestCase,
44 IEngineCoreTestCase,
44 IEngineCoreTestCase,
45 IEngineSerializedTestCase,
45 IEngineSerializedTestCase,
46 IEngineQueuedTestCase
46 IEngineQueuedTestCase
47 ):
47 ):
48
48
49 zi.implements(IControllerBase)
49 zi.implements(IControllerBase)
50
50
51 def setUp(self):
51 def setUp(self):
52
52
53 # Start a server and append to self.servers
53 # Start a server and append to self.servers
54 self.controller_reference = FCRemoteEngineRefFromService(self)
54 self.controller_reference = FCRemoteEngineRefFromService(self)
55 self.controller_tub = Tub()
55 self.controller_tub = Tub()
56 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
56 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
57 self.controller_tub.setLocation('127.0.0.1:10105')
57 self.controller_tub.setLocation('127.0.0.1:10105')
58
58
59 furl = self.controller_tub.registerReference(self.controller_reference)
59 furl = self.controller_tub.registerReference(self.controller_reference)
60 self.controller_tub.startService()
60 self.controller_tub.startService()
61
61
62 # Start an EngineService and append to services/client
62 # Start an EngineService and append to services/client
63 self.engine_service = es.EngineService()
63 self.engine_service = es.EngineService()
64 self.engine_service.startService()
64 self.engine_service.startService()
65 self.engine_tub = Tub()
65 self.engine_tub = Tub()
66 self.engine_tub.startService()
66 self.engine_tub.startService()
67 engine_connector = EngineConnector(self.engine_tub)
67 engine_connector = EngineConnector(self.engine_tub)
68 d = engine_connector.connect_to_controller(self.engine_service, furl)
68 d = engine_connector.connect_to_controller(self.engine_service, furl)
69 # This deferred doesn't fire until after register_engine has returned and
69 # This deferred doesn't fire until after register_engine has returned and
70 # thus, self.engine has been defined and the tests can proceed.
70 # thus, self.engine has been defined and the tests can proceed.
71 return d
71 return d
72
72
73 def tearDown(self):
73 def tearDown(self):
74 dlist = []
74 dlist = []
75 # Shut down the engine
75 # Shut down the engine
76 d = self.engine_tub.stopService()
76 d = self.engine_tub.stopService()
77 dlist.append(d)
77 dlist.append(d)
78 # Shut down the controller
78 # Shut down the controller
79 d = self.controller_tub.stopService()
79 d = self.controller_tub.stopService()
80 dlist.append(d)
80 dlist.append(d)
81 return defer.DeferredList(dlist)
81 return defer.DeferredList(dlist)
82
82
83 #---------------------------------------------------------------------------
83 #---------------------------------------------------------------------------
84 # Make me look like a basic controller
84 # Make me look like a basic controller
85 #---------------------------------------------------------------------------
85 #---------------------------------------------------------------------------
86
86
87 def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None):
87 def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None):
88 self.engine = IEngineQueued(IEngineBase(engine_ref))
88 self.engine = IEngineQueued(IEngineBase(engine_ref))
89 return {'id':id}
89 return {'id':id}
90
90
91 def unregister_engine(self, id):
91 def unregister_engine(self, id):
92 pass No newline at end of file
92 pass
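
A note on the setUp/tearDown pattern used by EngineFCTest above: Twisted's trial waits for any Deferred returned from setUp or tearDown before running or finishing a test, which is why the engine/controller wiring can be asynchronous. A minimal sketch of that pattern, with purely illustrative resource names, looks like this:

from twisted.trial import unittest
from twisted.internet import defer

class AsyncSetupExample(unittest.TestCase):

    def setUp(self):
        # trial will not run the test until this Deferred fires.
        self.resources = []
        d = defer.succeed('connected')
        d.addCallback(self.resources.append)
        return d

    def tearDown(self):
        # Likewise, trial waits for teardown Deferreds before moving on.
        return defer.DeferredList([defer.succeed(None) for _ in self.resources])

    def test_resource_ready(self):
        self.assertEqual(self.resources, ['connected'])
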
@@ -1,70 +1,144 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 __docformat__ = "restructuredtext en"
4 __docformat__ = "restructuredtext en"
5
5
6 #-------------------------------------------------------------------------------
6 #-------------------------------------------------------------------------------
7 # Copyright (C) 2008 The IPython Development Team
7 # Copyright (C) 2008 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-------------------------------------------------------------------------------
11 #-------------------------------------------------------------------------------
12
12
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16
16
17 try:
17 try:
18 from twisted.internet import defer, reactor
18 from twisted.internet import defer, reactor
19
19
20 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
20 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
21
21
22 from IPython.testing.util import DeferredTestCase
22 from IPython.testing.util import DeferredTestCase
23 from IPython.kernel.controllerservice import ControllerService
23 from IPython.kernel.controllerservice import ControllerService
24 from IPython.kernel.multiengine import IMultiEngine
24 from IPython.kernel.multiengine import IMultiEngine
25 from IPython.kernel.tests.multienginetest import IFullSynchronousMultiEngineTestCase
25 from IPython.kernel.tests.multienginetest import IFullSynchronousMultiEngineTestCase
26 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
26 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
27 from IPython.kernel import multiengine as me
27 from IPython.kernel import multiengine as me
28 from IPython.kernel.clientconnector import ClientConnector
28 from IPython.kernel.clientconnector import ClientConnector
29 from IPython.kernel.parallelfunction import ParallelFunction
30 from IPython.kernel.error import CompositeError
31 from IPython.kernel.util import printer
29 except ImportError:
32 except ImportError:
30 pass
33 pass
31 else:
34 else:
35
36 def _raise_it(f):
37 try:
38 f.raiseException()
39 except CompositeError, e:
40 e.raise_exception()
41
42
32 class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase):
43 class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase):
33
44
34 def setUp(self):
45 def setUp(self):
35
46
36 self.engines = []
47 self.engines = []
37
48
38 self.controller = ControllerService()
49 self.controller = ControllerService()
39 self.controller.startService()
50 self.controller.startService()
40 self.imultiengine = IMultiEngine(self.controller)
51 self.imultiengine = IMultiEngine(self.controller)
41 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
52 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
42
53
43 self.controller_tub = Tub()
54 self.controller_tub = Tub()
44 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
55 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
45 self.controller_tub.setLocation('127.0.0.1:10105')
56 self.controller_tub.setLocation('127.0.0.1:10105')
46
57
47 furl = self.controller_tub.registerReference(self.mec_referenceable)
58 furl = self.controller_tub.registerReference(self.mec_referenceable)
48 self.controller_tub.startService()
59 self.controller_tub.startService()
49
60
50 self.client_tub = ClientConnector()
61 self.client_tub = ClientConnector()
51 d = self.client_tub.get_multiengine_client(furl)
62 d = self.client_tub.get_multiengine_client(furl)
52 d.addCallback(self.handle_got_client)
63 d.addCallback(self.handle_got_client)
53 return d
64 return d
54
65
55 def handle_got_client(self, client):
66 def handle_got_client(self, client):
56 self.multiengine = client
67 self.multiengine = client
57
68
58 def tearDown(self):
69 def tearDown(self):
59 dlist = []
70 dlist = []
60 # Shut down the multiengine client
71 # Shut down the multiengine client
61 d = self.client_tub.tub.stopService()
72 d = self.client_tub.tub.stopService()
62 dlist.append(d)
73 dlist.append(d)
63 # Shut down the engines
74 # Shut down the engines
64 for e in self.engines:
75 for e in self.engines:
65 e.stopService()
76 e.stopService()
66 # Shut down the controller
77 # Shut down the controller
67 d = self.controller_tub.stopService()
78 d = self.controller_tub.stopService()
68 d.addBoth(lambda _: self.controller.stopService())
79 d.addBoth(lambda _: self.controller.stopService())
69 dlist.append(d)
80 dlist.append(d)
70 return defer.DeferredList(dlist)
81 return defer.DeferredList(dlist)
82
83 def test_mapper(self):
84 self.addEngine(4)
85 m = self.multiengine.mapper()
86 self.assertEquals(m.multiengine,self.multiengine)
87 self.assertEquals(m.dist,'b')
88 self.assertEquals(m.targets,'all')
89 self.assertEquals(m.block,True)
90
91 def test_map_default(self):
92 self.addEngine(4)
93 m = self.multiengine.mapper()
94 d = m.map(lambda x: 2*x, range(10))
95 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
96 d.addCallback(lambda _: self.multiengine.map(lambda x: 2*x, range(10)))
97 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
98 return d
99
100 def test_map_noblock(self):
101 self.addEngine(4)
102 m = self.multiengine.mapper(block=False)
103 d = m.map(lambda x: 2*x, range(10))
104 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
105 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
106 return d
107
108 def test_mapper_fail(self):
109 self.addEngine(4)
110 m = self.multiengine.mapper()
111 d = m.map(lambda x: 1/0, range(10))
112 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
113 return d
114
115 def test_parallel(self):
116 self.addEngine(4)
117 p = self.multiengine.parallel()
118 self.assert_(isinstance(p, ParallelFunction))
119 @p
120 def f(x): return 2*x
121 d = f(range(10))
122 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
123 return d
124
125 def test_parallel_noblock(self):
126 self.addEngine(1)
127 p = self.multiengine.parallel(block=False)
128 self.assert_(isinstance(p, ParallelFunction))
129 @p
130 def f(x): return 2*x
131 d = f(range(10))
132 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
133 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
134 return d
135
136 def test_parallel_fail(self):
137 self.addEngine(4)
138 p = self.multiengine.parallel()
139 self.assert_(isinstance(p, ParallelFunction))
140 @p
141 def f(x): return 1/0
142 d = f(range(10))
143 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
144 return d No newline at end of file
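
The test_parallel cases above rely on multiengine.parallel() returning a ParallelFunction that can be used as a decorator: calling the decorated function with a sequence maps it over that sequence on the engines. As a purely local illustration of that calling convention (the real ParallelFunction dispatches the work remotely and honours block=False), a stand-in could look like this:

def parallel_local(block=True):
    """Illustrative decorator factory: turn f into 'map f over a sequence'."""
    def decorate(f):
        def mapped(seq):
            # The engines would evaluate these calls in parallel.
            return [f(x) for x in seq]
        return mapped
    return decorate

@parallel_local()
def double(x):
    return 2 * x

# double(range(10)) -> [0, 2, 4, ..., 18], matching the assertion in test_parallel.
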
@@ -1,186 +1,186 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 """Tests for pendingdeferred.py"""
4 """Tests for pendingdeferred.py"""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18
18
19 try:
19 try:
20 from twisted.internet import defer
20 from twisted.internet import defer
21 from twisted.python import failure
21 from twisted.python import failure
22
22
23 from IPython.testing import tcommon
24 #from IPython.testing.tcommon import *
25 from IPython.testing.util import DeferredTestCase
23 from IPython.testing.util import DeferredTestCase
26 import IPython.kernel.pendingdeferred as pd
24 import IPython.kernel.pendingdeferred as pd
27 from IPython.kernel import error
25 from IPython.kernel import error
28 from IPython.kernel.util import printer
26 from IPython.kernel.util import printer
29 except ImportError:
27 except ImportError:
30 pass
28 pass
31 else:
29 else:
30
31 class Foo(object):
32
33 def bar(self, bahz):
34 return defer.succeed('blahblah: %s' % bahz)
32
35
33 class TwoPhaseFoo(pd.PendingDeferredManager):
36 class TwoPhaseFoo(pd.PendingDeferredManager):
34
37
35 def __init__(self, foo):
38 def __init__(self, foo):
36 self.foo = foo
39 self.foo = foo
37 pd.PendingDeferredManager.__init__(self)
40 pd.PendingDeferredManager.__init__(self)
38
41
39 @pd.two_phase
42 @pd.two_phase
40 def bar(self, bahz):
43 def bar(self, bahz):
41 return self.foo.bar(bahz)
44 return self.foo.bar(bahz)
42
45
43 class PendingDeferredManagerTest(DeferredTestCase):
46 class PendingDeferredManagerTest(DeferredTestCase):
44
47
45 def setUp(self):
48 def setUp(self):
46 self.pdm = pd.PendingDeferredManager()
49 self.pdm = pd.PendingDeferredManager()
47
50
48 def tearDown(self):
51 def tearDown(self):
49 pass
52 pass
50
53
51 def testBasic(self):
54 def testBasic(self):
52 dDict = {}
55 dDict = {}
53 # Create 10 deferreds and save them
56 # Create 10 deferreds and save them
54 for i in range(10):
57 for i in range(10):
55 d = defer.Deferred()
58 d = defer.Deferred()
56 did = self.pdm.save_pending_deferred(d)
59 did = self.pdm.save_pending_deferred(d)
57 dDict[did] = d
60 dDict[did] = d
58 # Make sure they are being saved
61 # Make sure they are being saved
59 for k in dDict.keys():
62 for k in dDict.keys():
60 self.assert_(self.pdm.quick_has_id(k))
63 self.assert_(self.pdm.quick_has_id(k))
61 # Get the pending deferred (block=True), then callback with 'foo' and compare
64 # Get the pending deferred (block=True), then callback with 'foo' and compare
62 for did in dDict.keys()[0:5]:
65 for did in dDict.keys()[0:5]:
63 d = self.pdm.get_pending_deferred(did,block=True)
66 d = self.pdm.get_pending_deferred(did,block=True)
64 dDict[did].callback('foo')
67 dDict[did].callback('foo')
65 d.addCallback(lambda r: self.assert_(r=='foo'))
68 d.addCallback(lambda r: self.assert_(r=='foo'))
66 # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised
69 # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised
67 for did in dDict.keys()[5:10]:
70 for did in dDict.keys()[5:10]:
68 d = self.pdm.get_pending_deferred(did,block=False)
71 d = self.pdm.get_pending_deferred(did,block=False)
69 d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
72 d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
70 # Now callback the last 5, get them and compare.
73 # Now callback the last 5, get them and compare.
71 for did in dDict.keys()[5:10]:
74 for did in dDict.keys()[5:10]:
72 dDict[did].callback('foo')
75 dDict[did].callback('foo')
73 d = self.pdm.get_pending_deferred(did,block=False)
76 d = self.pdm.get_pending_deferred(did,block=False)
74 d.addCallback(lambda r: self.assert_(r=='foo'))
77 d.addCallback(lambda r: self.assert_(r=='foo'))
75
78
76 def test_save_then_delete(self):
79 def test_save_then_delete(self):
77 d = defer.Deferred()
80 d = defer.Deferred()
78 did = self.pdm.save_pending_deferred(d)
81 did = self.pdm.save_pending_deferred(d)
79 self.assert_(self.pdm.quick_has_id(did))
82 self.assert_(self.pdm.quick_has_id(did))
80 self.pdm.delete_pending_deferred(did)
83 self.pdm.delete_pending_deferred(did)
81 self.assert_(not self.pdm.quick_has_id(did))
84 self.assert_(not self.pdm.quick_has_id(did))
82
85
83 def test_save_get_delete(self):
86 def test_save_get_delete(self):
84 d = defer.Deferred()
87 d = defer.Deferred()
85 did = self.pdm.save_pending_deferred(d)
88 did = self.pdm.save_pending_deferred(d)
86 d2 = self.pdm.get_pending_deferred(did,True)
89 d2 = self.pdm.get_pending_deferred(did,True)
87 d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException))
90 d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException))
88 self.pdm.delete_pending_deferred(did)
91 self.pdm.delete_pending_deferred(did)
89 return d2
92 return d2
90
93
91 def test_double_get(self):
94 def test_double_get(self):
92 d = defer.Deferred()
95 d = defer.Deferred()
93 did = self.pdm.save_pending_deferred(d)
96 did = self.pdm.save_pending_deferred(d)
94 d2 = self.pdm.get_pending_deferred(did,True)
97 d2 = self.pdm.get_pending_deferred(did,True)
95 d3 = self.pdm.get_pending_deferred(did,True)
98 d3 = self.pdm.get_pending_deferred(did,True)
96 d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException))
99 d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException))
97
100
98 def test_get_after_callback(self):
101 def test_get_after_callback(self):
99 d = defer.Deferred()
102 d = defer.Deferred()
100 did = self.pdm.save_pending_deferred(d)
103 did = self.pdm.save_pending_deferred(d)
101 d.callback('foo')
104 d.callback('foo')
102 d2 = self.pdm.get_pending_deferred(did,True)
105 d2 = self.pdm.get_pending_deferred(did,True)
103 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
106 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
104 self.assert_(not self.pdm.quick_has_id(did))
107 self.assert_(not self.pdm.quick_has_id(did))
105
108
106 def test_get_before_callback(self):
109 def test_get_before_callback(self):
107 d = defer.Deferred()
110 d = defer.Deferred()
108 did = self.pdm.save_pending_deferred(d)
111 did = self.pdm.save_pending_deferred(d)
109 d2 = self.pdm.get_pending_deferred(did,True)
112 d2 = self.pdm.get_pending_deferred(did,True)
110 d.callback('foo')
113 d.callback('foo')
111 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
114 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
112 self.assert_(not self.pdm.quick_has_id(did))
115 self.assert_(not self.pdm.quick_has_id(did))
113 d = defer.Deferred()
116 d = defer.Deferred()
114 did = self.pdm.save_pending_deferred(d)
117 did = self.pdm.save_pending_deferred(d)
115 d2 = self.pdm.get_pending_deferred(did,True)
118 d2 = self.pdm.get_pending_deferred(did,True)
116 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
119 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
117 d.callback('foo')
120 d.callback('foo')
118 self.assert_(not self.pdm.quick_has_id(did))
121 self.assert_(not self.pdm.quick_has_id(did))
119
122
120 def test_get_after_errback(self):
123 def test_get_after_errback(self):
121 class MyError(Exception):
124 class MyError(Exception):
122 pass
125 pass
123 d = defer.Deferred()
126 d = defer.Deferred()
124 did = self.pdm.save_pending_deferred(d)
127 did = self.pdm.save_pending_deferred(d)
125 d.errback(failure.Failure(MyError('foo')))
128 d.errback(failure.Failure(MyError('foo')))
126 d2 = self.pdm.get_pending_deferred(did,True)
129 d2 = self.pdm.get_pending_deferred(did,True)
127 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
130 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
128 self.assert_(not self.pdm.quick_has_id(did))
131 self.assert_(not self.pdm.quick_has_id(did))
129
132
130 def test_get_before_errback(self):
133 def test_get_before_errback(self):
131 class MyError(Exception):
134 class MyError(Exception):
132 pass
135 pass
133 d = defer.Deferred()
136 d = defer.Deferred()
134 did = self.pdm.save_pending_deferred(d)
137 did = self.pdm.save_pending_deferred(d)
135 d2 = self.pdm.get_pending_deferred(did,True)
138 d2 = self.pdm.get_pending_deferred(did,True)
136 d.errback(failure.Failure(MyError('foo')))
139 d.errback(failure.Failure(MyError('foo')))
137 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
140 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
138 self.assert_(not self.pdm.quick_has_id(did))
141 self.assert_(not self.pdm.quick_has_id(did))
139 d = defer.Deferred()
142 d = defer.Deferred()
140 did = self.pdm.save_pending_deferred(d)
143 did = self.pdm.save_pending_deferred(d)
141 d2 = self.pdm.get_pending_deferred(did,True)
144 d2 = self.pdm.get_pending_deferred(did,True)
142 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
145 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
143 d.errback(failure.Failure(MyError('foo')))
146 d.errback(failure.Failure(MyError('foo')))
144 self.assert_(not self.pdm.quick_has_id(did))
147 self.assert_(not self.pdm.quick_has_id(did))
145
148
146 def test_noresult_noblock(self):
149 def test_noresult_noblock(self):
147 d = defer.Deferred()
150 d = defer.Deferred()
148 did = self.pdm.save_pending_deferred(d)
151 did = self.pdm.save_pending_deferred(d)
149 d2 = self.pdm.get_pending_deferred(did,False)
152 d2 = self.pdm.get_pending_deferred(did,False)
150 d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
153 d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
151
154
152 def test_with_callbacks(self):
155 def test_with_callbacks(self):
153 d = defer.Deferred()
156 d = defer.Deferred()
154 d.addCallback(lambda r: r+' foo')
157 d.addCallback(lambda r: r+' foo')
155 d.addCallback(lambda r: r+' bar')
158 d.addCallback(lambda r: r+' bar')
156 did = self.pdm.save_pending_deferred(d)
159 did = self.pdm.save_pending_deferred(d)
157 d2 = self.pdm.get_pending_deferred(did,True)
160 d2 = self.pdm.get_pending_deferred(did,True)
158 d.callback('bam')
161 d.callback('bam')
159 d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar'))
162 d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar'))
160
163
161 def test_with_errbacks(self):
164 def test_with_errbacks(self):
162 class MyError(Exception):
165 class MyError(Exception):
163 pass
166 pass
164 d = defer.Deferred()
167 d = defer.Deferred()
165 d.addCallback(lambda r: 'foo')
168 d.addCallback(lambda r: 'foo')
166 d.addErrback(lambda f: 'caught error')
169 d.addErrback(lambda f: 'caught error')
167 did = self.pdm.save_pending_deferred(d)
170 did = self.pdm.save_pending_deferred(d)
168 d2 = self.pdm.get_pending_deferred(did,True)
171 d2 = self.pdm.get_pending_deferred(did,True)
169 d.errback(failure.Failure(MyError('bam')))
172 d.errback(failure.Failure(MyError('bam')))
170 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
173 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
171
174
172 def test_nested_deferreds(self):
175 def test_nested_deferreds(self):
173 d = defer.Deferred()
176 d = defer.Deferred()
174 d2 = defer.Deferred()
177 d2 = defer.Deferred()
175 d.addCallback(lambda r: d2)
178 d.addCallback(lambda r: d2)
176 did = self.pdm.save_pending_deferred(d)
179 did = self.pdm.save_pending_deferred(d)
177 d.callback('foo')
180 d.callback('foo')
178 d3 = self.pdm.get_pending_deferred(did,False)
181 d3 = self.pdm.get_pending_deferred(did,False)
179 d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
182 d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
180 d2.callback('bar')
183 d2.callback('bar')
181 d3 = self.pdm.get_pending_deferred(did,False)
184 d3 = self.pdm.get_pending_deferred(did,False)
182 d3.addCallback(lambda r: self.assertEquals(r,'bar'))
185 d3.addCallback(lambda r: self.assertEquals(r,'bar'))
183
186
184
185 # Global object expected by Twisted's trial
186 testSuite = lambda : makeTestSuite(__name__,dt_files,dt_modules)
@@ -1,90 +1,161 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 __docformat__ = "restructuredtext en"
4 __docformat__ = "restructuredtext en"
5
5
6 #-------------------------------------------------------------------------------
6 #-------------------------------------------------------------------------------
7 # Copyright (C) 2008 The IPython Development Team
7 # Copyright (C) 2008 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-------------------------------------------------------------------------------
11 #-------------------------------------------------------------------------------
12
12
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16
16
17 try:
17 try:
18 import time
18 import time
19
19
20 from twisted.internet import defer, reactor
20 from twisted.internet import defer, reactor
21
21
22 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
22 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
23
23
24 from IPython.kernel import task as taskmodule
24 from IPython.kernel import task as taskmodule
25 from IPython.kernel import controllerservice as cs
25 from IPython.kernel import controllerservice as cs
26 import IPython.kernel.multiengine as me
26 import IPython.kernel.multiengine as me
27 from IPython.testing.util import DeferredTestCase
27 from IPython.testing.util import DeferredTestCase
28 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
28 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
29 from IPython.kernel.taskfc import IFCTaskController
29 from IPython.kernel.taskfc import IFCTaskController
30 from IPython.kernel.util import printer
30 from IPython.kernel.util import printer
31 from IPython.kernel.tests.tasktest import ITaskControllerTestCase
31 from IPython.kernel.tests.tasktest import ITaskControllerTestCase
32 from IPython.kernel.clientconnector import ClientConnector
32 from IPython.kernel.clientconnector import ClientConnector
33 from IPython.kernel.error import CompositeError
34 from IPython.kernel.parallelfunction import ParallelFunction
33 except ImportError:
35 except ImportError:
34 pass
36 pass
35 else:
37 else:
36
38
37 #-------------------------------------------------------------------------------
39 #-------------------------------------------------------------------------------
38 # Tests
40 # Tests
39 #-------------------------------------------------------------------------------
41 #-------------------------------------------------------------------------------
40
42
43 def _raise_it(f):
44 try:
45 f.raiseException()
46 except CompositeError, e:
47 e.raise_exception()
48
41 class TaskTest(DeferredTestCase, ITaskControllerTestCase):
49 class TaskTest(DeferredTestCase, ITaskControllerTestCase):
42
50
43 def setUp(self):
51 def setUp(self):
44
52
45 self.engines = []
53 self.engines = []
46
54
47 self.controller = cs.ControllerService()
55 self.controller = cs.ControllerService()
48 self.controller.startService()
56 self.controller.startService()
49 self.imultiengine = me.IMultiEngine(self.controller)
57 self.imultiengine = me.IMultiEngine(self.controller)
50 self.itc = taskmodule.ITaskController(self.controller)
58 self.itc = taskmodule.ITaskController(self.controller)
51 self.itc.failurePenalty = 0
59 self.itc.failurePenalty = 0
52
60
53 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
61 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
54 self.tc_referenceable = IFCTaskController(self.itc)
62 self.tc_referenceable = IFCTaskController(self.itc)
55
63
56 self.controller_tub = Tub()
64 self.controller_tub = Tub()
57 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
65 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
58 self.controller_tub.setLocation('127.0.0.1:10105')
66 self.controller_tub.setLocation('127.0.0.1:10105')
59
67
60 mec_furl = self.controller_tub.registerReference(self.mec_referenceable)
68 mec_furl = self.controller_tub.registerReference(self.mec_referenceable)
61 tc_furl = self.controller_tub.registerReference(self.tc_referenceable)
69 tc_furl = self.controller_tub.registerReference(self.tc_referenceable)
62 self.controller_tub.startService()
70 self.controller_tub.startService()
63
71
64 self.client_tub = ClientConnector()
72 self.client_tub = ClientConnector()
65 d = self.client_tub.get_multiengine_client(mec_furl)
73 d = self.client_tub.get_multiengine_client(mec_furl)
66 d.addCallback(self.handle_mec_client)
74 d.addCallback(self.handle_mec_client)
67 d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl))
75 d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl))
68 d.addCallback(self.handle_tc_client)
76 d.addCallback(self.handle_tc_client)
69 return d
77 return d
70
78
71 def handle_mec_client(self, client):
79 def handle_mec_client(self, client):
72 self.multiengine = client
80 self.multiengine = client
73
81
74 def handle_tc_client(self, client):
82 def handle_tc_client(self, client):
75 self.tc = client
83 self.tc = client
76
84
77 def tearDown(self):
85 def tearDown(self):
78 dlist = []
86 dlist = []
79 # Shut down the multiengine client
87 # Shut down the multiengine client
80 d = self.client_tub.tub.stopService()
88 d = self.client_tub.tub.stopService()
81 dlist.append(d)
89 dlist.append(d)
82 # Shut down the engines
90 # Shut down the engines
83 for e in self.engines:
91 for e in self.engines:
84 e.stopService()
92 e.stopService()
85 # Shut down the controller
93 # Shut down the controller
86 d = self.controller_tub.stopService()
94 d = self.controller_tub.stopService()
87 d.addBoth(lambda _: self.controller.stopService())
95 d.addBoth(lambda _: self.controller.stopService())
88 dlist.append(d)
96 dlist.append(d)
89 return defer.DeferredList(dlist)
97 return defer.DeferredList(dlist)
90
98
99 def test_mapper(self):
100 self.addEngine(1)
101 m = self.tc.mapper()
102 self.assertEquals(m.task_controller,self.tc)
103 self.assertEquals(m.clear_before,False)
104 self.assertEquals(m.clear_after,False)
105 self.assertEquals(m.retries,0)
106 self.assertEquals(m.recovery_task,None)
107 self.assertEquals(m.depend,None)
108 self.assertEquals(m.block,True)
109
110 def test_map_default(self):
111 self.addEngine(1)
112 m = self.tc.mapper()
113 d = m.map(lambda x: 2*x, range(10))
114 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
115 d.addCallback(lambda _: self.tc.map(lambda x: 2*x, range(10)))
116 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
117 return d
118
119 def test_map_noblock(self):
120 self.addEngine(1)
121 m = self.tc.mapper(block=False)
122 d = m.map(lambda x: 2*x, range(10))
123 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
124 return d
125
126 def test_mapper_fail(self):
127 self.addEngine(1)
128 m = self.tc.mapper()
129 d = m.map(lambda x: 1/0, range(10))
130 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
131 return d
132
133 def test_parallel(self):
134 self.addEngine(1)
135 p = self.tc.parallel()
136 self.assert_(isinstance(p, ParallelFunction))
137 @p
138 def f(x): return 2*x
139 d = f(range(10))
140 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
141 return d
142
143 def test_parallel_noblock(self):
144 self.addEngine(1)
145 p = self.tc.parallel(block=False)
146 self.assert_(isinstance(p, ParallelFunction))
147 @p
148 def f(x): return 2*x
149 d = f(range(10))
150 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
151 return d
152
153 def test_parallel_fail(self):
154 self.addEngine(1)
155 p = self.tc.parallel()
156 self.assert_(isinstance(p, ParallelFunction))
157 @p
158 def f(x): return 1/0
159 d = f(range(10))
160 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
161 return d No newline at end of file
@@ -1,90 +1,90 b''
1 """
1 """
2 An exceptionally lousy site spider
2 An exceptionally lousy site spider
3 Ken Kinder <ken@kenkinder.com>
3 Ken Kinder <ken@kenkinder.com>
4
4
5 This module gives an example of how the TaskClient interface to the
5 This module gives an example of how the TaskClient interface to the
6 IPython controller works. Before running this script start the IPython controller
6 IPython controller works. Before running this script start the IPython controller
7 and some engines using something like::
7 and some engines using something like::
8
8
9 ipcluster -n 4
9 ipcluster -n 4
10 """
10 """
11 from twisted.python.failure import Failure
11 from twisted.python.failure import Failure
12 from IPython.kernel import client
12 from IPython.kernel import client
13 import time
13 import time
14
14
15 fetchParse = """
15 fetchParse = """
16 from twisted.web import microdom
16 from twisted.web import microdom
17 import urllib2
17 import urllib2
18 import urlparse
18 import urlparse
19
19
20 def fetchAndParse(url, data=None):
20 def fetchAndParse(url, data=None):
21 links = []
21 links = []
22 try:
22 try:
23 page = urllib2.urlopen(url, data=data)
23 page = urllib2.urlopen(url, data=data)
24 except Exception:
24 except Exception:
25 return links
25 return links
26 else:
26 else:
27 if page.headers.type == 'text/html':
27 if page.headers.type == 'text/html':
28 doc = microdom.parseString(page.read(), beExtremelyLenient=True)
28 doc = microdom.parseString(page.read(), beExtremelyLenient=True)
29 for node in doc.getElementsByTagName('a'):
29 for node in doc.getElementsByTagName('a'):
30 if node.getAttribute('href'):
30 if node.getAttribute('href'):
31 links.append(urlparse.urljoin(url, node.getAttribute('href')))
31 links.append(urlparse.urljoin(url, node.getAttribute('href')))
32 return links
32 return links
33 """
33 """
34
34
35 class DistributedSpider(object):
35 class DistributedSpider(object):
36
36
37 # Time to wait between polling for task results.
37 # Time to wait between polling for task results.
38 pollingDelay = 0.5
38 pollingDelay = 0.5
39
39
40 def __init__(self, site):
40 def __init__(self, site):
41 self.tc = client.TaskClient()
41 self.tc = client.TaskClient()
42 self.rc = client.MultiEngineClient()
42 self.rc = client.MultiEngineClient()
43 self.rc.execute(fetchParse)
43 self.rc.execute(fetchParse)
44
44
45 self.allLinks = []
45 self.allLinks = []
46 self.linksWorking = {}
46 self.linksWorking = {}
47 self.linksDone = {}
47 self.linksDone = {}
48
48
49 self.site = site
49 self.site = site
50
50
51 def visitLink(self, url):
51 def visitLink(self, url):
52 if url not in self.allLinks:
52 if url not in self.allLinks:
53 self.allLinks.append(url)
53 self.allLinks.append(url)
54 if url.startswith(self.site):
54 if url.startswith(self.site):
55 print ' ', url
55 print ' ', url
56 self.linksWorking[url] = self.tc.run(client.Task('links = fetchAndParse(url)', pull=['links'], push={'url': url}))
56 self.linksWorking[url] = self.tc.run(client.StringTask('links = fetchAndParse(url)', pull=['links'], push={'url': url}))
57
57
58 def onVisitDone(self, result, url):
58 def onVisitDone(self, result, url):
59 print url, ':'
59 print url, ':'
60 self.linksDone[url] = None
60 self.linksDone[url] = None
61 del self.linksWorking[url]
61 del self.linksWorking[url]
62 if isinstance(result.failure, Failure):
62 if isinstance(result.failure, Failure):
63 txt = result.failure.getTraceback()
63 txt = result.failure.getTraceback()
64 for line in txt.split('\n'):
64 for line in txt.split('\n'):
65 print ' ', line
65 print ' ', line
66 else:
66 else:
67 for link in result.ns.links:
67 for link in result.ns.links:
68 self.visitLink(link)
68 self.visitLink(link)
69
69
70 def run(self):
70 def run(self):
71 self.visitLink(self.site)
71 self.visitLink(self.site)
72 while self.linksWorking:
72 while self.linksWorking:
73 print len(self.linksWorking), 'pending...'
73 print len(self.linksWorking), 'pending...'
74 self.synchronize()
74 self.synchronize()
75 time.sleep(self.pollingDelay)
75 time.sleep(self.pollingDelay)
76
76
77 def synchronize(self):
77 def synchronize(self):
78 for url, taskId in self.linksWorking.items():
78 for url, taskId in self.linksWorking.items():
79 # Calling get_task_result with block=False will return None if the
79 # Calling get_task_result with block=False will return None if the
80 # task is not done yet. This provides a simple way of polling.
80 # task is not done yet. This provides a simple way of polling.
81 result = self.tc.get_task_result(taskId, block=False)
81 result = self.tc.get_task_result(taskId, block=False)
82 if result is not None:
82 if result is not None:
83 self.onVisitDone(result, url)
83 self.onVisitDone(result, url)
84
84
85 def main():
85 def main():
86 distributedSpider = DistributedSpider(raw_input('Enter site to crawl: '))
86 distributedSpider = DistributedSpider(raw_input('Enter site to crawl: '))
87 distributedSpider.run()
87 distributedSpider.run()
88
88
89 if __name__ == '__main__':
89 if __name__ == '__main__':
90 main()
90 main()
@@ -1,14 +1,14 b''
1 """
1 """
2 A Distributed Hello world
2 A Distributed Hello world
3 Ken Kinder <ken@kenkinder.com>
3 Ken Kinder <ken@kenkinder.com>
4 """
4 """
5 from IPython.kernel import client
5 from IPython.kernel import client
6
6
7 tc = client.TaskClient()
7 tc = client.TaskClient()
8 mec = client.MultiEngineClient()
8 mec = client.MultiEngineClient()
9
9
10 mec.execute('import time')
10 mec.execute('import time')
11 hello_taskid = tc.run(client.Task('time.sleep(3) ; word = "Hello,"', pull=('word')))
11 hello_taskid = tc.run(client.StringTask('time.sleep(3) ; word = "Hello,"', pull=('word')))
12 world_taskid = tc.run(client.Task('time.sleep(3) ; word = "World!"', pull=('word')))
12 world_taskid = tc.run(client.StringTask('time.sleep(3) ; word = "World!"', pull=('word')))
13 print "Submitted tasks:", hello_taskid, world_taskid
13 print "Submitted tasks:", hello_taskid, world_taskid
14 print tc.get_task_result(hello_taskid,block=True).ns.word, tc.get_task_result(world_taskid,block=True).ns.word
14 print tc.get_task_result(hello_taskid,block=True).ns.word, tc.get_task_result(world_taskid,block=True).ns.word
@@ -1,71 +1,71 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 """Run a Monte-Carlo options pricer in parallel."""
3 """Run a Monte-Carlo options pricer in parallel."""
4
4
5 from IPython.kernel import client
5 from IPython.kernel import client
6 import numpy as N
6 import numpy as N
7 from mcpricer import MCOptionPricer
7 from mcpricer import MCOptionPricer
8
8
9
9
10 tc = client.TaskClient()
10 tc = client.TaskClient()
11 rc = client.MultiEngineClient()
11 rc = client.MultiEngineClient()
12
12
13 # Initialize the common code on the engines
13 # Initialize the common code on the engines
14 rc.run('mcpricer.py')
14 rc.run('mcpricer.py')
15
15
16 # Push the variables that won't change
16 # Push the variables that won't change
17 #(stock price, interest rate, days and MC paths)
17 #(stock price, interest rate, days and MC paths)
18 rc.push(dict(S=100.0, r=0.05, days=260, paths=10000))
18 rc.push(dict(S=100.0, r=0.05, days=260, paths=10000))
19
19
20 task_string = """\
20 task_string = """\
21 op = MCOptionPricer(S,K,sigma,r,days,paths)
21 op = MCOptionPricer(S,K,sigma,r,days,paths)
22 op.run()
22 op.run()
23 vp, ap, vc, ac = op.vanilla_put, op.asian_put, op.vanilla_call, op.asian_call
23 vp, ap, vc, ac = op.vanilla_put, op.asian_put, op.vanilla_call, op.asian_call
24 """
24 """
25
25
26 # Create arrays of strike prices and volatilities
26 # Create arrays of strike prices and volatilities
27 K_vals = N.linspace(90.0,100.0,5)
27 K_vals = N.linspace(90.0,100.0,5)
28 sigma_vals = N.linspace(0.0, 0.2,5)
28 sigma_vals = N.linspace(0.0, 0.2,5)
29
29
30 # Submit tasks
30 # Submit tasks
31 taskids = []
31 taskids = []
32 for K in K_vals:
32 for K in K_vals:
33 for sigma in sigma_vals:
33 for sigma in sigma_vals:
34 t = client.Task(task_string,
34 t = client.StringTask(task_string,
35 push=dict(sigma=sigma,K=K),
35 push=dict(sigma=sigma,K=K),
36 pull=('vp','ap','vc','ac','sigma','K'))
36 pull=('vp','ap','vc','ac','sigma','K'))
37 taskids.append(tc.run(t))
37 taskids.append(tc.run(t))
38
38
39 print "Submitted tasks: ", taskids
39 print "Submitted tasks: ", taskids
40
40
41 # Block until tasks are completed
41 # Block until tasks are completed
42 tc.barrier(taskids)
42 tc.barrier(taskids)
43
43
44 # Get the results
44 # Get the results
45 results = [tc.get_task_result(tid) for tid in taskids]
45 results = [tc.get_task_result(tid) for tid in taskids]
46
46
47 # Assemble the result
47 # Assemble the result
48 vc = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
48 vc = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
49 vp = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
49 vp = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
50 ac = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
50 ac = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
51 ap = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
51 ap = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64')
52 for i, tr in enumerate(results):
52 for i, tr in enumerate(results):
53 ns = tr.ns
53 ns = tr.ns
54 vc[i] = ns.vc
54 vc[i] = ns.vc
55 vp[i] = ns.vp
55 vp[i] = ns.vp
56 ac[i] = ns.ac
56 ac[i] = ns.ac
57 ap[i] = ns.ap
57 ap[i] = ns.ap
58 vc.shape = (K_vals.shape[0],sigma_vals.shape[0])
58 vc.shape = (K_vals.shape[0],sigma_vals.shape[0])
59 vp.shape = (K_vals.shape[0],sigma_vals.shape[0])
59 vp.shape = (K_vals.shape[0],sigma_vals.shape[0])
60 ac.shape = (K_vals.shape[0],sigma_vals.shape[0])
60 ac.shape = (K_vals.shape[0],sigma_vals.shape[0])
61 ap.shape = (K_vals.shape[0],sigma_vals.shape[0])
61 ap.shape = (K_vals.shape[0],sigma_vals.shape[0])
62
62
63
63
64 def plot_options(K_vals, sigma_vals, prices):
64 def plot_options(K_vals, sigma_vals, prices):
65 """Make a contour plot of the option prices."""
65 """Make a contour plot of the option prices."""
66 import pylab
66 import pylab
67 pylab.contourf(sigma_vals, K_vals, prices)
67 pylab.contourf(sigma_vals, K_vals, prices)
68 pylab.colorbar()
68 pylab.colorbar()
69 pylab.title("Option Price")
69 pylab.title("Option Price")
70 pylab.xlabel("Volatility")
70 pylab.xlabel("Volatility")
71 pylab.ylabel("Strike Price")
71 pylab.ylabel("Strike Price")
@@ -1,18 +1,18 b''
1 from IPython.kernel import client
1 from IPython.kernel import client
2
2
3 tc = client.TaskClient()
3 tc = client.TaskClient()
4 rc = client.MultiEngineClient()
4 rc = client.MultiEngineClient()
5
5
6 rc.push(dict(d=30))
6 rc.push(dict(d=30))
7
7
8 cmd1 = """\
8 cmd1 = """\
9 a = 5
9 a = 5
10 b = 10*d
10 b = 10*d
11 c = a*b*d
11 c = a*b*d
12 """
12 """
13
13
14 t1 = client.Task(cmd1, clear_before=False, clear_after=True, pull=['a','b','c'])
14 t1 = client.StringTask(cmd1, clear_before=False, clear_after=True, pull=['a','b','c'])
15 tid1 = tc.run(t1)
15 tid1 = tc.run(t1)
16 tr1 = tc.get_task_result(tid1,block=True)
16 tr1 = tc.get_task_result(tid1,block=True)
17 tr1.raiseException()
17 tr1.raise_exception()
18 print "a, b: ", tr1.ns.a, tr1.ns.b No newline at end of file
18 print "a, b: ", tr1.ns.a, tr1.ns.b
@@ -1,44 +1,44 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 from IPython.kernel import client
4 from IPython.kernel import client
5 import time
5 import time
6
6
7 tc = client.TaskClient()
7 tc = client.TaskClient()
8 mec = client.MultiEngineClient()
8 mec = client.MultiEngineClient()
9
9
10 mec.execute('import time')
10 mec.execute('import time')
11
11
12 for i in range(24):
12 for i in range(24):
13 tc.irun('time.sleep(1)')
13 tc.run(client.StringTask('time.sleep(1)'))
14
14
15 for i in range(6):
15 for i in range(6):
16 time.sleep(1.0)
16 time.sleep(1.0)
17 print "Queue status (verbose=False)"
17 print "Queue status (verbose=False)"
18 print tc.queue_status()
18 print tc.queue_status()
19
19
20 for i in range(24):
20 for i in range(24):
21 tc.irun('time.sleep(1)')
21 tc.run(client.StringTask('time.sleep(1)'))
22
22
23 for i in range(6):
23 for i in range(6):
24 time.sleep(1.0)
24 time.sleep(1.0)
25 print "Queue status (verbose=True)"
25 print "Queue status (verbose=True)"
26 print tc.queue_status(True)
26 print tc.queue_status(True)
27
27
28 for i in range(12):
28 for i in range(12):
29 tc.irun('time.sleep(2)')
29 tc.run(client.StringTask('time.sleep(2)'))
30
30
31 print "Queue status (verbose=True)"
31 print "Queue status (verbose=True)"
32 print tc.queue_status(True)
32 print tc.queue_status(True)
33
33
34 qs = tc.queue_status(True)
34 qs = tc.queue_status(True)
35 sched = qs['scheduled']
35 sched = qs['scheduled']
36
36
37 for tid in sched[-4:]:
37 for tid in sched[-4:]:
38 tc.abort(tid)
38 tc.abort(tid)
39
39
40 for i in range(6):
40 for i in range(6):
41 time.sleep(1.0)
41 time.sleep(1.0)
42 print "Queue status (verbose=True)"
42 print "Queue status (verbose=True)"
43 print tc.queue_status(True)
43 print tc.queue_status(True)
44
44
@@ -1,77 +1,77 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """Test the performance of the task farming system.
2 """Test the performance of the task farming system.
3
3
4 This script submits a set of tasks to the TaskClient. The tasks
4 This script submits a set of tasks to the TaskClient. The tasks
5 are basically just a time.sleep(t), where t is a random number between
5 are basically just a time.sleep(t), where t is a random number between
6 two limits that can be configured at the command line. To run
6 two limits that can be configured at the command line. To run
7 the script there must first be an IPython controller and engines running::
7 the script there must first be an IPython controller and engines running::
8
8
9 ipcluster -n 16
9 ipcluster -n 16
10
10
11 A good test to run with 16 engines is::
11 A good test to run with 16 engines is::
12
12
13 python task_profiler.py -n 128 -t 0.01 -T 1.0
13 python task_profiler.py -n 128 -t 0.01 -T 1.0
14
14
15 This should show a speedup of 13-14x. The limitation here is that the
15 This should show a speedup of 13-14x. The limitation here is that the
16 overhead of a single task is about 0.001-0.01 seconds.
16 overhead of a single task is about 0.001-0.01 seconds.
17 """
17 """
18 import random, sys
18 import random, sys
19 from optparse import OptionParser
19 from optparse import OptionParser
20
20
21 from IPython.genutils import time
21 from IPython.genutils import time
22 from IPython.kernel import client
22 from IPython.kernel import client
23
23
24 def main():
24 def main():
25 parser = OptionParser()
25 parser = OptionParser()
26 parser.set_defaults(n=100)
26 parser.set_defaults(n=100)
27 parser.set_defaults(tmin=1)
27 parser.set_defaults(tmin=1)
28 parser.set_defaults(tmax=60)
28 parser.set_defaults(tmax=60)
29 parser.set_defaults(controller='localhost')
29 parser.set_defaults(controller='localhost')
30 parser.set_defaults(meport=10105)
30 parser.set_defaults(meport=10105)
31 parser.set_defaults(tport=10113)
31 parser.set_defaults(tport=10113)
32
32
33 parser.add_option("-n", type='int', dest='n',
33 parser.add_option("-n", type='int', dest='n',
34 help='the number of tasks to run')
34 help='the number of tasks to run')
35 parser.add_option("-t", type='float', dest='tmin',
35 parser.add_option("-t", type='float', dest='tmin',
36 help='the minimum task length in seconds')
36 help='the minimum task length in seconds')
37 parser.add_option("-T", type='float', dest='tmax',
37 parser.add_option("-T", type='float', dest='tmax',
38 help='the maximum task length in seconds')
38 help='the maximum task length in seconds')
39 parser.add_option("-c", type='string', dest='controller',
39 parser.add_option("-c", type='string', dest='controller',
40 help='the address of the controller')
40 help='the address of the controller')
41 parser.add_option("-p", type='int', dest='meport',
41 parser.add_option("-p", type='int', dest='meport',
42 help="the port on which the controller listens for the MultiEngine/RemoteController client")
42 help="the port on which the controller listens for the MultiEngine/RemoteController client")
43 parser.add_option("-P", type='int', dest='tport',
43 parser.add_option("-P", type='int', dest='tport',
44 help="the port on which the controller listens for the TaskClient client")
44 help="the port on which the controller listens for the TaskClient client")
45
45
46 (opts, args) = parser.parse_args()
46 (opts, args) = parser.parse_args()
47 assert opts.tmax >= opts.tmin, "tmax must not be smaller than tmin"
47 assert opts.tmax >= opts.tmin, "tmax must not be smaller than tmin"
48
48
49 rc = client.MultiEngineClient()
49 rc = client.MultiEngineClient()
50 tc = client.TaskClient()
50 tc = client.TaskClient()
51 print tc.task_controller
51 print tc.task_controller
52 rc.block=True
52 rc.block=True
53 nengines = len(rc.get_ids())
53 nengines = len(rc.get_ids())
54 rc.execute('from IPython.genutils import time')
54 rc.execute('from IPython.genutils import time')
55
55
56 # the jobs should take a random time within a range
56 # the jobs should take a random time within a range
57 times = [random.random()*(opts.tmax-opts.tmin)+opts.tmin for i in range(opts.n)]
57 times = [random.random()*(opts.tmax-opts.tmin)+opts.tmin for i in range(opts.n)]
58 tasks = [client.Task("time.sleep(%f)"%t) for t in times]
58 tasks = [client.StringTask("time.sleep(%f)"%t) for t in times]
59 stime = sum(times)
59 stime = sum(times)
60
60
61 print "executing %i tasks, totalling %.1f secs on %i engines"%(opts.n, stime, nengines)
61 print "executing %i tasks, totalling %.1f secs on %i engines"%(opts.n, stime, nengines)
62 time.sleep(1)
62 time.sleep(1)
63 start = time.time()
63 start = time.time()
64 taskids = [tc.run(t) for t in tasks]
64 taskids = [tc.run(t) for t in tasks]
65 tc.barrier(taskids)
65 tc.barrier(taskids)
66 stop = time.time()
66 stop = time.time()
67
67
68 ptime = stop-start
68 ptime = stop-start
69 scale = stime/ptime
69 scale = stime/ptime
70
70
71 print "executed %.1f secs in %.1f secs"%(stime, ptime)
71 print "executed %.1f secs in %.1f secs"%(stime, ptime)
72 print "%.3fx parallel performance on %i engines"%(scale, nengines)
72 print "%.3fx parallel performance on %i engines"%(scale, nengines)
73 print "%.1f%% of theoretical max"%(100*scale/nengines)
73 print "%.1f%% of theoretical max"%(100*scale/nengines)
74
74
75
75
76 if __name__ == '__main__':
76 if __name__ == '__main__':
77 main()
77 main()
@@ -1,173 +1,195 b''
1 .. _changes:
1 .. _changes:
2
2
3 ==========
3 ==========
4 What's new
4 What's new
5 ==========
5 ==========
6
6
7 .. contents::
7 .. contents::
8
8
9 Release 0.9
9 Release 0.9
10 ===========
10 ===========
11
11
12 New features
12 New features
13 ------------
13 ------------
14
14
15 * The notion of a task has been completely reworked. An `ITask` interface has
16 been created. This interface defines the methods that tasks need to implement.
17 These methods are now responsible for things like submitting tasks and processing
18 results. There are two basic task types: :class:`IPython.kernel.task.StringTask`
19 (this is the old `Task` object, but renamed) and the new
20 :class:`IPython.kernel.task.MapTask`, which is based on a function.
21 * A new interface, :class:`IPython.kernel.mapper.IMapper`, has been defined to
22 standardize the idea of a `map` method. This interface has a single
23 `map` method that has the same syntax as the built-in `map`. We have also defined
24 a `mapper` factory interface that creates objects that implement
25 :class:`IPython.kernel.mapper.IMapper` for different controllers. Both
26 the multiengine and task controller now have mapping capabilities.
27 * The parallel function capabilities have been reworked. The major changes are that
28 i) there is now an `@parallel` magic that creates parallel functions, ii)
29 the syntax for multiple variables follows that of `map`, iii) both the
30 multiengine and task controller now have a parallel function implementation (a usage sketch follows this list).
15 * All of the parallel computing capabilities from `ipython1-dev` have been merged into
31 * All of the parallel computing capabilities from `ipython1-dev` have been merged into
16 IPython proper. This resulted in the following new subpackages:
32 IPython proper. This resulted in the following new subpackages:
17 :mod:`IPython.kernel`, :mod:`IPython.kernel.core`, :mod:`IPython.config`,
33 :mod:`IPython.kernel`, :mod:`IPython.kernel.core`, :mod:`IPython.config`,
18 :mod:`IPython.tools` and :mod:`IPython.testing`.
34 :mod:`IPython.tools` and :mod:`IPython.testing`.
19 * As part of merging in the `ipython1-dev` stuff, the `setup.py` script and friends
35 * As part of merging in the `ipython1-dev` stuff, the `setup.py` script and friends
20 have been completely refactored. Now we are checking for dependencies using
36 have been completely refactored. Now we are checking for dependencies using
21 the approach that matplotlib uses.
37 the approach that matplotlib uses.
22 * The documentation has been completely reorganized to accept the documentation
38 * The documentation has been completely reorganized to accept the documentation
23 from `ipython1-dev`.
39 from `ipython1-dev`.
24 * We have switched to using Foolscap for all of our network protocols in
40 * We have switched to using Foolscap for all of our network protocols in
25 :mod:`IPython.kernel`. This gives us secure connections that are both encrypted
41 :mod:`IPython.kernel`. This gives us secure connections that are both encrypted
26 and authenticated.
42 and authenticated.
27 * We have a brand new `COPYING.txt` file that describes the IPython license
43 * We have a brand new `COPYING.txt` file that describes the IPython license
28 and copyright. The biggest change is that we are putting "The IPython
44 and copyright. The biggest change is that we are putting "The IPython
29 Development Team" as the copyright holder. We give more details about exactly
45 Development Team" as the copyright holder. We give more details about exactly
30 what this means in this file. All developers should read this and use the new
46 what this means in this file. All developers should read this and use the new
31 banner in all IPython source code files.
47 banner in all IPython source code files.
32 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
48 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
33 * String lists now support 'sort(field, nums = True)' method (to easily
49 * String lists now support 'sort(field, nums = True)' method (to easily
34 sort system command output). Try it with 'a = !ls -l ; a.sort(1, nums=1)'
50 sort system command output). Try it with 'a = !ls -l ; a.sort(1, nums=1)'
35 * '%cpaste foo' now assigns the pasted block as string list, instead of string
51 * '%cpaste foo' now assigns the pasted block as string list, instead of string
36 * The ipcluster script now runs by default with no security. This is done because
52 * The ipcluster script now runs by default with no security. This is done because
37 the main usage of the script is for starting things on localhost. Eventually
53 the main usage of the script is for starting things on localhost. Eventually
38 when ipcluster is able to start things on other hosts, we will put security
54 when ipcluster is able to start things on other hosts, we will put security
39 back.
55 back.
40
56
41
42
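The mapper and parallel-function items above can be illustrated with a short client-side sketch. This is a hedged example, not part of the changeset: it assumes a running controller with engines, and that the blocking `IPython.kernel.client.TaskClient` mirrors the `mapper()`/`parallel()` calls exercised in the tests earlier in this changeset; the name `double` is purely illustrative::

    from IPython.kernel import client

    tc = client.TaskClient()

    # IMapper-style mapping: same call signature as the builtin map
    m = tc.mapper()
    print m.map(lambda x: 2*x, range(10))

    # parallel function: tc.parallel() returns a decorator (a ParallelFunction)
    @tc.parallel()
    def double(x):
        return 2*x
    print double(range(10))

The multiengine controller is described as having the same mapping and parallel-function capabilities, so the equivalent calls on a `MultiEngineClient` should look the same.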
43 Bug fixes
57 Bug fixes
44 ---------
58 ---------
45
59
60 * The color escapes in the multiengine client are now turned off on win32 as they
61 don't print correctly.
46 * The :mod:`IPython.kernel.scripts.ipengine` script was exec'ing mpi_import_statement
62 * The :mod:`IPython.kernel.scripts.ipengine` script was exec'ing mpi_import_statement
47 incorrectly, which was leading the engine to crash when mpi was enabled.
63 incorrectly, which was leading the engine to crash when mpi was enabled.
48 * A few subpackages were missing `__init__.py` files.
64 * A few subpackages were missing `__init__.py` files.
49 * The documentation is only created if Sphinx is found. Previously, the `setup.py`
65 * The documentation is only created if Sphinx is found. Previously, the `setup.py`
50 script would fail if it was missing.
66 script would fail if it was missing.
51
67
52 Backwards incompatible changes
68 Backwards incompatible changes
53 ------------------------------
69 ------------------------------
54
70
71 * :class:`IPython.kernel.client.Task` has been renamed
72 :class:`IPython.kernel.client.StringTask` to make way for new task types (see the sketch after this list).
73 * The keyword argument `style` has been renamed `dist` in `scatter`, `gather`
74 and `map`.
75 * Renamed the values that the renamed `dist` keyword argument can have from
76 `'basic'` to `'b'`.
55 * IPython has a larger set of dependencies if you want all of its capabilities.
77 * IPython has a larger set of dependencies if you want all of its capabilities.
56 See the `setup.py` script for details.
78 See the `setup.py` script for details.
57 * The constructors for :class:`IPython.kernel.client.MultiEngineClient` and
79 * The constructors for :class:`IPython.kernel.client.MultiEngineClient` and
58 :class:`IPython.kernel.client.TaskClient` no longer take the (ip,port) tuple.
80 :class:`IPython.kernel.client.TaskClient` no longer take the (ip,port) tuple.
59 Instead they take the filename of a file that contains the FURL for that
81 Instead they take the filename of a file that contains the FURL for that
60 client. If the FURL file is in your IPYTHONDIR, it will be found automatically
82 client. If the FURL file is in your IPYTHONDIR, it will be found automatically
61 and the constructor can be left empty.
83 and the constructor can be left empty.
62 * The asynchronous clients in :mod:`IPython.kernel.asyncclient` are now created
84 * The asynchronous clients in :mod:`IPython.kernel.asyncclient` are now created
63 using the factory functions :func:`get_multiengine_client` and
85 using the factory functions :func:`get_multiengine_client` and
64 :func:`get_task_client`. These return a `Deferred` to the actual client.
86 :func:`get_task_client`. These return a `Deferred` to the actual client.
65 * The command line options to `ipcontroller` and `ipengine` have changed to
87 * The command line options to `ipcontroller` and `ipengine` have changed to
66 reflect the new Foolscap network protocol and the FURL files. Please see the
88 reflect the new Foolscap network protocol and the FURL files. Please see the
67 help for these scripts for details.
89 help for these scripts for details.
68 * The configuration files for the kernel have changed because of the Foolscap stuff.
90 * The configuration files for the kernel have changed because of the Foolscap stuff.
69 If you were using custom config files before, you should delete them and regenerate
91 If you were using custom config files before, you should delete them and regenerate
70 new ones.
92 new ones.
71
93
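A minimal sketch of the new client construction and task naming described in the list above. The FURL filename shown is an assumption; with the FURL files in your IPYTHONDIR the constructors can simply be left empty::

    from IPython.kernel import client

    # FURL files in IPYTHONDIR are found automatically
    mec = client.MultiEngineClient()
    tc = client.TaskClient()

    # otherwise, pass the name of the file holding the client's FURL
    # (the filename below is illustrative)
    tc = client.TaskClient('ipcontroller-tc.furl')

    # the old Task class is now StringTask
    tid = tc.run(client.StringTask('a = 5', pull=['a']))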
72 Changes merged in from IPython1
94 Changes merged in from IPython1
73 -------------------------------
95 -------------------------------
74
96
75 New features
97 New features
76 ............
98 ............
77
99
78 * Much improved ``setup.py`` and ``setupegg.py`` scripts. Because Twisted
100 * Much improved ``setup.py`` and ``setupegg.py`` scripts. Because Twisted
79 and zope.interface are now easily installable, we can declare them as dependencies
101 and zope.interface are now easily installable, we can declare them as dependencies
80 in our setupegg.py script.
102 in our setupegg.py script.
81 * IPython is now compatible with Twisted 2.5.0 and 8.x.
103 * IPython is now compatible with Twisted 2.5.0 and 8.x.
82 * Added a new example of how to use :mod:`ipython1.kernel.asynclient`.
104 * Added a new example of how to use :mod:`ipython1.kernel.asynclient`.
83 * Initial draft of a process daemon in :mod:`ipython1.daemon`. This has not
105 * Initial draft of a process daemon in :mod:`ipython1.daemon`. This has not
84 been merged into IPython and is still in `ipython1-dev`.
106 been merged into IPython and is still in `ipython1-dev`.
85 * The ``TaskController`` now has methods for getting the queue status.
107 * The ``TaskController`` now has methods for getting the queue status.
86 * The ``TaskResult`` objects now have information about how long the task
108 * The ``TaskResult`` objects now have information about how long the task
87 took to run.
109 took to run.
88 * We are attaching additional attributes to exceptions ``(_ipython_*)`` that
110 * We are attaching additional attributes to exceptions ``(_ipython_*)`` that
89 we use to carry additional info around.
111 we use to carry additional info around.
90 * New top-level module :mod:`asyncclient` that has asynchronous versions (that
112 * New top-level module :mod:`asyncclient` that has asynchronous versions (that
91 return deferreds) of the client classes. This is designed for users who want
113 return deferreds) of the client classes. This is designed for users who want
92 to run their own Twisted reactor.
114 to run their own Twisted reactor.
93 * All the clients in :mod:`client` are now based on Twisted. This is done by
115 * All the clients in :mod:`client` are now based on Twisted. This is done by
94 running the Twisted reactor in a separate thread and using the
116 running the Twisted reactor in a separate thread and using the
95 :func:`blockingCallFromThread` function that is in recent versions of Twisted.
117 :func:`blockingCallFromThread` function that is in recent versions of Twisted.
96 * Functions can now be pushed/pulled to/from engines using
118 * Functions can now be pushed/pulled to/from engines using
97 :meth:`MultiEngineClient.push_function` and :meth:`MultiEngineClient.pull_function` (see the sketch after this list).
119 :meth:`MultiEngineClient.push_function` and :meth:`MultiEngineClient.pull_function` (see the sketch after this list).
98 * Gather/scatter are now implemented in the client to reduce the work load
120 * Gather/scatter are now implemented in the client to reduce the work load
99 of the controller and improve performance.
121 of the controller and improve performance.
100 * Complete rewrite of the IPython documentation. All of the documentation
122 * Complete rewrite of the IPython documentation. All of the documentation
101 from the IPython website has been moved into docs/source as restructured
123 from the IPython website has been moved into docs/source as restructured
102 text documents. PDF and HTML documentation are being generated using
124 text documents. PDF and HTML documentation are being generated using
103 Sphinx.
125 Sphinx.
104 * New developer oriented documentation: development guidelines and roadmap.
126 * New developer oriented documentation: development guidelines and roadmap.
105 * Traditional ``ChangeLog`` has been changed to a more useful ``changes.txt`` file
127 * Traditional ``ChangeLog`` has been changed to a more useful ``changes.txt`` file
106 that is organized by release and is meant to provide something more relevant
128 that is organized by release and is meant to provide something more relevant
107 for users.
129 for users.
108
130
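The function push/pull feature noted above might be used along the following lines. This is a hedged sketch: the dict-based signature is assumed to follow the same convention as `push`/`pull`, and the function name is illustrative::

    from IPython.kernel import client

    mec = client.MultiEngineClient()

    def gamma(x):
        return x**3

    # send the function to the engines, call it remotely, pull the result back
    mec.push_function(dict(gamma=gamma))
    mec.execute('r = gamma(4)')
    print mec.pull('r')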
109 Bug fixes
131 Bug fixes
110 .........
132 .........
111
133
112 * Created a proper ``MANIFEST.in`` file to create source distributions.
134 * Created a proper ``MANIFEST.in`` file to create source distributions.
113 * Fixed a bug in the ``MultiEngine`` interface. Previously, multi-engine
135 * Fixed a bug in the ``MultiEngine`` interface. Previously, multi-engine
114 actions were being collected with a :class:`DeferredList` with
136 actions were being collected with a :class:`DeferredList` with
115 ``fireononeerrback=1``. This meant that methods were returning
137 ``fireononeerrback=1``. This meant that methods were returning
116 before all engines had given their results. This was causing extremely odd
138 before all engines had given their results. This was causing extremely odd
117 bugs in certain cases. To fix this problem, we have 1) set
139 bugs in certain cases. To fix this problem, we have 1) set
118 ``fireononeerrback=0`` to make sure all results (or exceptions) are in
140 ``fireononeerrback=0`` to make sure all results (or exceptions) are in
119 before returning and 2) introduced a :exc:`CompositeError` exception
141 before returning and 2) introduced a :exc:`CompositeError` exception
120 that wraps all of the engine exceptions. This is a huge change as it means
142 that wraps all of the engine exceptions. This is a huge change as it means
121 that users will have to catch :exc:`CompositeError` rather than the actual
143 that users will have to catch :exc:`CompositeError` rather than the actual
122 exception (a sketch follows this list).
144 exception (a sketch follows this list).
123
145
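Because engine errors now arrive wrapped, client code catches :exc:`CompositeError` rather than the original exception. A hedged sketch, assuming a running controller and engines::

    from IPython.kernel import client
    from IPython.kernel.error import CompositeError

    mec = client.MultiEngineClient()
    try:
        mec.execute('1/0')
    except CompositeError, e:
        print e                # summarizes the exceptions raised on each engine
        # e.raise_exception()  # or re-raise one of the wrapped exceptions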
124 Backwards incompatible changes
146 Backwards incompatible changes
125 ..............................
147 ..............................
126
148
127 * All names have been renamed to conform to the lowercase_with_underscore
149 * All names have been renamed to conform to the lowercase_with_underscore
128 convention. This will require users to change references to all names like
150 convention. This will require users to change references to all names like
129 ``queueStatus`` to ``queue_status``.
151 ``queueStatus`` to ``queue_status``.
130 * Previously, methods like :meth:`MultiEngineClient.push` and
152 * Previously, methods like :meth:`MultiEngineClient.push` and
131 :meth:`MultiEngineClient.pull` used ``*args`` and ``**kwargs``. This was
153 :meth:`MultiEngineClient.pull` used ``*args`` and ``**kwargs``. This was
132 becoming a problem as we weren't able to introduce new keyword arguments into
154 becoming a problem as we weren't able to introduce new keyword arguments into
133 the API. Now these methods simply take a dict or sequence. This has also allowed
155 the API. Now these methods simply take a dict or sequence. This has also allowed
134 us to get rid of the ``*All`` methods like :meth:`pushAll` and :meth:`pullAll`.
156 us to get rid of the ``*All`` methods like :meth:`pushAll` and :meth:`pullAll`.
135 These things are now handled with the ``targets`` keyword argument that defaults
157 These things are now handled with the ``targets`` keyword argument that defaults
136 to ``'all'``.
158 to ``'all'``.
137 * The :attr:`MultiEngineClient.magicTargets` has been renamed to
159 * The :attr:`MultiEngineClient.magicTargets` has been renamed to
138 :attr:`MultiEngineClient.targets`.
160 :attr:`MultiEngineClient.targets`.
139 * All methods in the MultiEngine interface now accept the optional keyword argument
161 * All methods in the MultiEngine interface now accept the optional keyword argument
140 ``block``.
162 ``block``.
141 * Renamed :class:`RemoteController` to :class:`MultiEngineClient` and
163 * Renamed :class:`RemoteController` to :class:`MultiEngineClient` and
142 :class:`TaskController` to :class:`TaskClient`.
164 :class:`TaskController` to :class:`TaskClient`.
143 * Renamed the top-level module from :mod:`api` to :mod:`client`.
165 * Renamed the top-level module from :mod:`api` to :mod:`client`.
144 * Most methods in the multiengine interface now raise a :exc:`CompositeError` exception
166 * Most methods in the multiengine interface now raise a :exc:`CompositeError` exception
145 that wraps the user's exceptions, rather than just raising the raw user's exception.
167 that wraps the user's exceptions, rather than just raising the raw user's exception.
146 * Changed the ``setupNS`` and ``resultNames`` in the ``Task`` class to ``push``
168 * Changed the ``setupNS`` and ``resultNames`` in the ``Task`` class to ``push``
147 and ``pull``.
169 and ``pull``.
148
170
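Taken together, the renames above change client code roughly as in this sketch; the values are illustrative and the exact keyword handling is assumed rather than quoted from the API::

    from IPython.kernel import client

    mec = client.MultiEngineClient()

    # lowercase_with_underscore names, dict-based push, targets/block keywords
    mec.push(dict(a=1, b=2), targets='all')
    mec.execute('c = a + b', block=True)
    print mec.pull('c')

    # setupNS/resultNames on tasks are now the push/pull keywords
    t = client.StringTask('c = a + b', push=dict(a=1, b=2), pull=['c'])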
149 Release 0.8.4
171 Release 0.8.4
150 =============
172 =============
151
173
152 Someone needs to describe what went into 0.8.4.
174 Someone needs to describe what went into 0.8.4.
153
175
154 Release 0.8.2
176 Release 0.8.2
155 =============
177 =============
156
178
157 * %pushd/%popd behave differently; now "pushd /foo" pushes CURRENT directory
179 * %pushd/%popd behave differently; now "pushd /foo" pushes CURRENT directory
158 and jumps to /foo. The current behaviour is closer to the documented
180 and jumps to /foo. The current behaviour is closer to the documented
159 behaviour, and should not trip anyone.
181 behaviour, and should not trip anyone.
160
182
161 Release 0.8.3
183 Release 0.8.3
162 =============
184 =============
163
185
164 * pydb is now disabled by default (due to %run -d problems). You can enable
186 * pydb is now disabled by default (due to %run -d problems). You can enable
165 it by passing -pydb command line argument to IPython. Note that setting
187 it by passing -pydb command line argument to IPython. Note that setting
166 it in config file won't work.
188 it in config file won't work.
167
189
168 Older releases
190 Older releases
169 ==============
191 ==============
170
192
171 Changes in earlier releases of IPython are described in the older file ``ChangeLog``.
193 Changes in earlier releases of IPython are described in the older file ``ChangeLog``.
172 Please refer to this document for details.
194 Please refer to this document for details.
173
195
@@ -1,9 +1,11 b''
1 .. _install_index:
2
1 ==================
3 ==================
2 Installation
4 Installation
3 ==================
5 ==================
4
6
5 .. toctree::
7 .. toctree::
6 :maxdepth: 2
8 :maxdepth: 2
7
9
8 basic.txt
10 basic.txt
9 advanced.txt
11 advanced.txt
@@ -1,189 +1,174 b''
1 .. _overview:
1 .. _overview:
2
2
3 ============
3 ============
4 Introduction
4 Introduction
5 ============
5 ============
6
6
7 This is the official documentation for IPython 0.x series (i.e. what
8 we are used to refer to just as "IPython"). The original text of the
9 manual (most of which is still in place) has been authored by Fernando
10 Perez, but as recommended usage patterns and new features have
11 emerged, this manual has been updated to reflect that fact. Most of
12 the additions have been authored by Ville M. Vainio.
13
14 The manual has been generated from reStructuredText source markup with
15 Sphinx, which should make it much easier to keep it up-to-date in the
16 future. Some reST artifacts and bugs may still be apparent in the
17 documentation, but this should improve as the toolchain matures.
18
19 Overview
7 Overview
20 ========
8 ========
21
9
22 One of Python's most useful features is its interactive interpreter.
10 One of Python's most useful features is its interactive interpreter.
23 This system allows very fast testing of ideas without the overhead of
11 This system allows very fast testing of ideas without the overhead of
24 creating test files as is typical in most programming languages.
12 creating test files as is typical in most programming languages.
25 However, the interpreter supplied with the standard Python distribution
13 However, the interpreter supplied with the standard Python distribution
26 is somewhat limited for extended interactive use.
14 is somewhat limited for extended interactive use.
27
15
28 IPython is a free software project (released under the BSD license)
16 The goal of IPython is to create a comprehensive environment for
29 which tries to:
17 interactive and exploratory computing. To support, this goal, IPython
18 has two main components:
19
20 * An enhanced interactive Python shell.
21 * An architecture for interactive parallel computing.
22
23 All of IPython is open source (released under the revised BSD license).
24
25 Enhanced interactive Python shell
26 =================================
27
28 IPython's interactive shell (`ipython`) has the following goals:
30
29
31 1. Provide an interactive shell superior to Python's default. IPython
30 1. Provide an interactive shell superior to Python's default. IPython
32 has many features for object introspection, system shell access,
31 has many features for object introspection, system shell access,
33 and its own special command system for adding functionality when
32 and its own special command system for adding functionality when
34 working interactively. It tries to be a very efficient environment
33 working interactively. It tries to be a very efficient environment
35 both for Python code development and for exploration of problems
34 both for Python code development and for exploration of problems
36 using Python objects (in situations like data analysis).
35 using Python objects (in situations like data analysis).
37 2. Serve as an embeddable, ready to use interpreter for your own
36 2. Serve as an embeddable, ready to use interpreter for your own
38 programs. IPython can be started with a single call from inside
37 programs. IPython can be started with a single call from inside
39 another program, providing access to the current namespace. This
38 another program, providing access to the current namespace. This
40 can be very useful both for debugging purposes and for situations
39 can be very useful both for debugging purposes and for situations
41 where a blend of batch-processing and interactive exploration is
40 where a blend of batch-processing and interactive exploration is
42 needed (see the sketch after this list).
41 needed (see the sketch after this list).
43 3. Offer a flexible framework which can be used as the base
42 3. Offer a flexible framework which can be used as the base
44 environment for other systems with Python as the underlying
43 environment for other systems with Python as the underlying
45 language. Specifically scientific environments like Mathematica,
44 language. Specifically scientific environments like Mathematica,
46 IDL and Matlab inspired its design, but similar ideas can be
45 IDL and Matlab inspired its design, but similar ideas can be
47 useful in many fields.
46 useful in many fields.
48 4. Allow interactive testing of threaded graphical toolkits. IPython
47 4. Allow interactive testing of threaded graphical toolkits. IPython
49 has support for interactive, non-blocking control of GTK, Qt and
48 has support for interactive, non-blocking control of GTK, Qt and
50 WX applications via special threading flags. The normal Python
49 WX applications via special threading flags. The normal Python
51 shell can only do this for Tkinter applications.
50 shell can only do this for Tkinter applications.
52
51
53
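As a sketch of goal 2 (embedding), the classic recipe for the 0.x series is assumed to be the `IPShellEmbed` helper; the example below is illustrative and not part of this document::

    from IPython.Shell import IPShellEmbed

    ipshell = IPShellEmbed()

    def compute():
        data = [1, 2, 3]
        # drops into an IPython shell with access to the local namespace;
        # exiting the shell (Ctrl-D) returns control to the program
        ipshell()

    compute()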
52 Main features of the interactive shell
54 Main features
53 --------------------------------------
55 -------------
54
56
55 * Dynamic object introspection. One can access docstrings, function
57 * Dynamic object introspection. One can access docstrings, function
56 definition prototypes, source code, source files and other details
58 definition prototypes, source code, source files and other details
57 of any object accessible to the interpreter with a single
59 of any object accessible to the interpreter with a single
58 keystroke (:samp:`?`, and using :samp:`??` provides additional detail).
60 keystroke ('?', and using '??' provides additional detail).
59 * Searching through modules and namespaces with :samp:`*` wildcards, both
61 * Searching through modules and namespaces with '*' wildcards, both
60 when using the :samp:`?` system and via the :samp:`%psearch` command.
62 when using the '?' system and via the %psearch command.
61 * Completion in the local namespace, by typing :kbd:`TAB` at the prompt.
63 * Completion in the local namespace, by typing TAB at the prompt.
62 This works for keywords, modules, methods, variables and files in the
64 This works for keywords, modules, methods, variables and files in the
63 current directory. This is supported via the readline library, and
65 current directory. This is supported via the readline library, and
64 full access to configuring readline's behavior is provided.
66 full access to configuring readline's behavior is provided.
65 Custom completers can be implemented easily for different purposes
67 Custom completers can be implemented easily for different purposes
66 (system commands, magic arguments etc.)
68 (system commands, magic arguments etc.)
67 * Numbered input/output prompts with command history (persistent
69 * Numbered input/output prompts with command history (persistent
68 across sessions and tied to each profile), full searching in this
70 across sessions and tied to each profile), full searching in this
69 history and caching of all input and output.
71 history and caching of all input and output.
70 * User-extensible 'magic' commands. A set of commands prefixed with
72 * User-extensible 'magic' commands. A set of commands prefixed with
71 :samp:`%` is available for controlling IPython itself and provides
73 % is available for controlling IPython itself and provides
72 directory control, namespace information and many aliases to
74 directory control, namespace information and many aliases to
73 common system shell commands.
75 common system shell commands.
74 * Alias facility for defining your own system aliases.
76 * Alias facility for defining your own system aliases.
75 * Complete system shell access. Lines starting with :samp:`!` are passed
77 * Complete system shell access. Lines starting with ! are passed
76 directly to the system shell, and using :samp:`!!` or :samp:`var = !cmd`
78 directly to the system shell, and using !! or var = !cmd
77 captures shell output into python variables for further use.
79 captures shell output into python variables for further use.
78 * Background execution of Python commands in a separate thread.
80 * Background execution of Python commands in a separate thread.
79 IPython has an internal job manager called jobs, and a
81 IPython has an internal job manager called jobs, and a
81 convenience backgrounding magic function called :samp:`%bg`.
82 convenience backgrounding magic function called %bg.
81 * The ability to expand python variables when calling the system
83 * The ability to expand python variables when calling the system
82 shell. In a shell command, any python variable prefixed with :samp:`$` is
84 shell. In a shell command, any python variable prefixed with $ is
83 expanded. A double :samp:`$$` allows passing a literal :samp:`$` to the shell (for
85 expanded. A double $$ allows passing a literal $ to the shell (for
84 access to shell and environment variables like :envvar:`PATH`).
86 access to shell and environment variables like $PATH).
85 * Filesystem navigation, via a magic :samp:`%cd` command, along with a
87 * Filesystem navigation, via a magic %cd command, along with a
86 persistent bookmark system (using :samp:`%bookmark`) for fast access to
88 persistent bookmark system (using %bookmark) for fast access to
87 frequently visited directories.
89 frequently visited directories.
88 * A lightweight persistence framework via the :samp:`%store` command, which
90 * A lightweight persistence framework via the %store command, which
89 allows you to save arbitrary Python variables. These get restored
91 allows you to save arbitrary Python variables. These get restored
90 automatically when your session restarts.
92 automatically when your session restarts.
91 * Automatic indentation (optional) of code as you type (through the
93 * Automatic indentation (optional) of code as you type (through the
92 readline library).
94 readline library).
93 * Macro system for quickly re-executing multiple lines of previous
95 * Macro system for quickly re-executing multiple lines of previous
94 input with a single name. Macros can be stored persistently via
96 input with a single name. Macros can be stored persistently via
95 :samp:`%store` and edited via :samp:`%edit`.
97 %store and edited via %edit.
96 * Session logging (you can then later use these logs as code in your
98 * Session logging (you can then later use these logs as code in your
97 programs). Logs can optionally timestamp all input, and also store
99 programs). Logs can optionally timestamp all input, and also store
98 session output (marked as comments, so the log remains valid
100 session output (marked as comments, so the log remains valid
99 Python source code).
101 Python source code).
100 * Session restoring: logs can be replayed to restore a previous
102 * Session restoring: logs can be replayed to restore a previous
101 session to the state where you left it.
103 session to the state where you left it.
102 * Verbose and colored exception traceback printouts. Easier to parse
104 * Verbose and colored exception traceback printouts. Easier to parse
103 visually, and in verbose mode they produce a lot of useful
105 visually, and in verbose mode they produce a lot of useful
104 debugging information (basically a terminal version of the cgitb
106 debugging information (basically a terminal version of the cgitb
105 module).
107 module).
106 * Auto-parentheses: callable objects can be executed without
108 * Auto-parentheses: callable objects can be executed without
107 parentheses: :samp:`sin 3` is automatically converted to :samp:`sin(3)`.
109 parentheses: 'sin 3' is automatically converted to 'sin(3)'.
108 * Auto-quoting: using :samp:`,`, or :samp:`;` as the first character forces
110 * Auto-quoting: using ',' or ';' as the first character forces
109 auto-quoting of the rest of the line: :samp:`,my_function a b` becomes
111 auto-quoting of the rest of the line: ',my_function a b' becomes
110 automatically :samp:`my_function("a","b")`, while :samp:`;my_function a b`
112 automatically 'my_function("a","b")', while ';my_function a b'
111 becomes :samp:`my_function("a b")`.
113 becomes 'my_function("a b")'.
112 * Extensible input syntax. You can define filters that pre-process
114 * Extensible input syntax. You can define filters that pre-process
113 user input to simplify input in special situations. This allows
115 user input to simplify input in special situations. This allows
114 for example pasting multi-line code fragments which start with
116 for example pasting multi-line code fragments which start with
115 :samp:`>>>` or :samp:`...` such as those from other python sessions or the
117 '>>>' or '...' such as those from other python sessions or the
116 standard Python documentation.
118 standard Python documentation.
117 * Flexible configuration system. It uses a configuration file which
119 * Flexible configuration system. It uses a configuration file which
118 allows permanent setting of all command-line options, module
120 allows permanent setting of all command-line options, module
119 loading, code and file execution. The system allows recursive file
121 loading, code and file execution. The system allows recursive file
120 inclusion, so you can have a base file with defaults and layers
122 inclusion, so you can have a base file with defaults and layers
121 which load other customizations for particular projects.
123 which load other customizations for particular projects.
122 * Embeddable. You can call IPython as a python shell inside your own
124 * Embeddable. You can call IPython as a python shell inside your own
123 python programs. This can be used both for debugging code or for
125 python programs. This can be used both for debugging code or for
124 providing interactive abilities to your programs with knowledge
126 providing interactive abilities to your programs with knowledge
125 about the local namespaces (very useful in debugging and data
127 about the local namespaces (very useful in debugging and data
126 analysis situations).
128 analysis situations).
127 * Easy debugger access. You can set IPython to call up an enhanced
129 * Easy debugger access. You can set IPython to call up an enhanced
128 version of the Python debugger (pdb) every time there is an
130 version of the Python debugger (pdb) every time there is an
129 uncaught exception. This drops you inside the code which triggered
131 uncaught exception. This drops you inside the code which triggered
130 the exception with all the data live and it is possible to
132 the exception with all the data live and it is possible to
131 navigate the stack to rapidly isolate the source of a bug. The
133 navigate the stack to rapidly isolate the source of a bug. The
132 :samp:`%run` magic command (with the :samp:`-d` option) can run any script under
134 %run magic command (with the -d option) can run any script under
133 pdb's control, automatically setting initial breakpoints for you.
135 pdb's control, automatically setting initial breakpoints for you.
134 This version of pdb has IPython-specific improvements, including
136 This version of pdb has IPython-specific improvements, including
135 tab-completion and traceback coloring support. For even easier
137 tab-completion and traceback coloring support. For even easier
136 debugger access, try :samp:`%debug` after seeing an exception. winpdb is
138 debugger access, try %debug after seeing an exception. winpdb is
137 also supported; see the ipy_winpdb extension.
139 also supported; see the ipy_winpdb extension.
138 * Profiler support. You can run single statements (similar to
140 * Profiler support. You can run single statements (similar to
139 :samp:`profile.run()`) or complete programs under the profiler's control.
141 profile.run()) or complete programs under the profiler's control.
140 While this is possible with standard cProfile or profile modules,
142 While this is possible with standard cProfile or profile modules,
141 IPython wraps this functionality with magic commands (see :samp:`%prun`
143 IPython wraps this functionality with magic commands (see '%prun'
142 and :samp:`%run -p`) convenient for rapid interactive work.
144 and '%run -p') convenient for rapid interactive work.
143 * Doctest support. The special :samp:`%doctest_mode` command toggles a mode
145 * Doctest support. The special %doctest_mode command toggles a mode
144 that allows you to paste existing doctests (with leading :samp:`>>>`
146 that allows you to paste existing doctests (with leading '>>>'
145 prompts and whitespace) and uses doctest-compatible prompts and
147 prompts and whitespace) and uses doctest-compatible prompts and
146 output, so you can use IPython sessions as doctest code.
148 output, so you can use IPython sessions as doctest code.
147
149
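
To make several of the features listed above concrete (shell access with !, capturing shell output into Python variables, $ variable expansion, %store, and the %run -d / %prun magics), here is a rough sketch of an interactive session; the file names and any output are hypothetical::

    In [1]: files = !ls *.py          # run ls in the system shell; capture output as a Python list-like object

    In [2]: pattern = '*.txt'

    In [3]: !ls $pattern              # the Python variable 'pattern' is expanded before the shell sees it

    In [4]: upper_names = [f.upper() for f in files]

    In [5]: %store upper_names        # persist the variable; it is restored when a new session starts

    In [6]: %run -d myscript.py       # run a (hypothetical) script under pdb's control

    In [7]: %prun sum(range(100000))  # profile a single statement

Every line in this sketch uses only the magics and shell-access syntax described in the list above.
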
148 Interactive parallel computing
149 ==============================
150
151 Increasingly, parallel computer hardware, such as multicore CPUs, clusters and supercomputers, is becoming ubiquitous. Over the last 3 years, we have developed an
152 architecture within IPython that allows such hardware to be used quickly and easily
153 from Python. Moreover, this architecture is designed to support interactive and
154 collaborative parallel computing.
155
156 For more information, see our :ref:`overview <parallel_index>` of using IPython for
157 parallel computing.
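
As a minimal sketch of what this looks like in practice (assuming a controller and a few engines are already running, for example started with the ipcluster script, and using the IPython.kernel client interface), a parallel map can be driven from an ordinary interactive session::

    # Minimal sketch; assumes a controller and engines are already running,
    # e.g. started with the ipcluster script on the local machine.
    from IPython.kernel import client

    mec = client.MultiEngineClient()               # connect to the running controller
    mec.execute('import math')                     # run a statement on every engine
    squares = mec.map(lambda x: x**2, range(32))   # distribute the map across the engines
    print squares[:8]

A load-balanced variant of the same map is available through the task client; see the parallel documentation for details.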
150
158
151 Portability and Python requirements
159 Portability and Python requirements
152 -----------------------------------
160 -----------------------------------
153
161
154 Python requirements: IPython requires Python version 2.3 or newer.
162 As of the 0.9 release, IPython requires Python 2.4 or greater. We have
155 If you are still using Python 2.2 and can not upgrade, the last version
163 not begun to test IPython on Python 2.6 or 3.0, but we expect it will
156 of IPython which worked with Python 2.2 was 0.6.15, so you will have to
164 work with some minor changes.
157 use that.
165
158
166 IPython is known to work on the following operating systems:
159 IPython is developed under Linux, but it should work in any reasonable
167
160 Unix-type system (tested OK under Solaris and the BSD family, for which
168 * Linux
161 a port exists thanks to Dryice Liu).
169 * AIX
162
170 * Most other Unix-like OSs (Solaris, BSD, etc.)
163 Mac OS X: it works, apparently without any problems (thanks to Jim Boyle
171 * Mac OS X
164 at Lawrence Livermore for the information). Thanks to Andrea Riciputi,
172 * Windows (Cygwin, XP, Vista, etc.)
165 Fink support is available.
173
166
174 See :ref:`here <install_index>` for instructions on how to install IPython.
167 Cygwin: it works mostly OK, though some users have reported problems
168 with prompt coloring. No satisfactory solution to this has been found so
169 far; you may want to disable colors permanently in the ipythonrc
170 configuration file if you experience problems. If you have proper color
171 support under Cygwin, please post to the IPython mailing list so this
172 issue can be resolved for all users.
173
174 Windows: it works well under Windows Vista/XP/2k, and I suspect NT should
175 behave similarly. Section "Installation under windows" describes
176 installation details for Windows, including some additional tools needed
177 on this platform.
178
179 Windows 9x support is present, and has been reported to work fine (at
180 least on WinME).
181
182 Location
183 --------
184
185 IPython is generously hosted at http://ipython.scipy.org by
186 Enthought, Inc. and the SciPy project. This site offers downloads,
187 subversion access, mailing lists and a bug tracking system. I am very
188 grateful to Enthought (http://www.enthought.com) and all of the SciPy
189 team for their contribution.
@@ -1,15 +1,17 b''
1 .. _parallel_index:
2
1 ====================================
3 ====================================
2 Using IPython for Parallel computing
4 Using IPython for Parallel computing
3 ====================================
5 ====================================
4
6
5 User Documentation
7 User Documentation
6 ==================
8 ==================
7
9
8 .. toctree::
10 .. toctree::
9 :maxdepth: 2
11 :maxdepth: 2
10
12
11 parallel_intro.txt
13 parallel_intro.txt
12 parallel_multiengine.txt
14 parallel_multiengine.txt
13 parallel_task.txt
15 parallel_task.txt
14 parallel_mpi.txt
16 parallel_mpi.txt
15
17
NO CONTENT: file was removed