move IPython.zmq.parallel to IPython.parallel
MinRK
@@ -0,0 +1,316 b''
1 #!/usr/bin/env python
2 # encoding: utf-8
3 """
4 Job and task components for writing .xml files that the Windows HPC Server
5 2008 can use to start jobs.
6 """
7
8 #-----------------------------------------------------------------------------
9 # Copyright (C) 2008-2009 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
13 #-----------------------------------------------------------------------------
14
15 #-----------------------------------------------------------------------------
16 # Imports
17 #-----------------------------------------------------------------------------
18
19 from __future__ import with_statement
20
21 import os
22 import re
23 import uuid
24
25 from xml.etree import ElementTree as ET
26
27 from IPython.config.configurable import Configurable
28 from IPython.utils.traitlets import (
29 Str, Int, List, Instance,
30 Enum, Bool, CStr
31 )
32
33 #-----------------------------------------------------------------------------
34 # Job and Task classes
35 #-----------------------------------------------------------------------------
36
37
38 def as_str(value):
39 if isinstance(value, str):
40 return value
41 elif isinstance(value, bool):
42 if value:
43 return 'true'
44 else:
45 return 'false'
46 elif isinstance(value, (int, float)):
47 return repr(value)
48 else:
49 return value
50
51
52 def indent(elem, level=0):
53 i = "\n" + level*" "
54 if len(elem):
55 if not elem.text or not elem.text.strip():
56 elem.text = i + " "
57 if not elem.tail or not elem.tail.strip():
58 elem.tail = i
59 for elem in elem:
60 indent(elem, level+1)
61 if not elem.tail or not elem.tail.strip():
62 elem.tail = i
63 else:
64 if level and (not elem.tail or not elem.tail.strip()):
65 elem.tail = i
66
67
68 def find_username():
69 domain = os.environ.get('USERDOMAIN')
70 username = os.environ.get('USERNAME','')
71 if domain is None:
72 return username
73 else:
74 return '%s\\%s' % (domain, username)
75
76
77 class WinHPCJob(Configurable):
78
79 job_id = Str('')
80 job_name = Str('MyJob', config=True)
81 min_cores = Int(1, config=True)
82 max_cores = Int(1, config=True)
83 min_sockets = Int(1, config=True)
84 max_sockets = Int(1, config=True)
85 min_nodes = Int(1, config=True)
86 max_nodes = Int(1, config=True)
87 unit_type = Str("Core", config=True)
88 auto_calculate_min = Bool(True, config=True)
89 auto_calculate_max = Bool(True, config=True)
90 run_until_canceled = Bool(False, config=True)
91 is_exclusive = Bool(False, config=True)
92 username = Str(find_username(), config=True)
93 job_type = Str('Batch', config=True)
94 priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
95 default_value='Highest', config=True)
96 requested_nodes = Str('', config=True)
97 project = Str('IPython', config=True)
98 xmlns = Str('http://schemas.microsoft.com/HPCS2008/scheduler/')
99 version = Str("2.000")
100 tasks = List([])
101
102 @property
103 def owner(self):
104 return self.username
105
106 def _write_attr(self, root, attr, key):
107 s = as_str(getattr(self, attr, ''))
108 if s:
109 root.set(key, s)
110
111 def as_element(self):
112 # We have to add _A_-style prefixes to get the attribute order that
113 # the MSFT XML parser expects.
114 root = ET.Element('Job')
115 self._write_attr(root, 'version', '_A_Version')
116 self._write_attr(root, 'job_name', '_B_Name')
117 self._write_attr(root, 'unit_type', '_C_UnitType')
118 self._write_attr(root, 'min_cores', '_D_MinCores')
119 self._write_attr(root, 'max_cores', '_E_MaxCores')
120 self._write_attr(root, 'min_sockets', '_F_MinSockets')
121 self._write_attr(root, 'max_sockets', '_G_MaxSockets')
122 self._write_attr(root, 'min_nodes', '_H_MinNodes')
123 self._write_attr(root, 'max_nodes', '_I_MaxNodes')
124 self._write_attr(root, 'run_until_canceled', '_J_RunUntilCanceled')
125 self._write_attr(root, 'is_exclusive', '_K_IsExclusive')
126 self._write_attr(root, 'username', '_L_UserName')
127 self._write_attr(root, 'job_type', '_M_JobType')
128 self._write_attr(root, 'priority', '_N_Priority')
129 self._write_attr(root, 'requested_nodes', '_O_RequestedNodes')
130 self._write_attr(root, 'auto_calculate_max', '_P_AutoCalculateMax')
131 self._write_attr(root, 'auto_calculate_min', '_Q_AutoCalculateMin')
132 self._write_attr(root, 'project', '_R_Project')
133 self._write_attr(root, 'owner', '_S_Owner')
134 self._write_attr(root, 'xmlns', '_T_xmlns')
135 dependencies = ET.SubElement(root, "Dependencies")
136 etasks = ET.SubElement(root, "Tasks")
137 for t in self.tasks:
138 etasks.append(t.as_element())
139 return root
140
141 def tostring(self):
142 """Return the string representation of the job description XML."""
143 root = self.as_element()
144 indent(root)
145 txt = ET.tostring(root, encoding="utf-8")
146 # Now remove the tokens used to order the attributes.
147 txt = re.sub(r'_[A-Z]_','',txt)
148 txt = '<?xml version="1.0" encoding="utf-8"?>\n' + txt
149 return txt
150
151 def write(self, filename):
152 """Write the XML job description to a file."""
153 txt = self.tostring()
154 with open(filename, 'w') as f:
155 f.write(txt)
156
157 def add_task(self, task):
158 """Add a task to the job.
159
160 Parameters
161 ----------
162 task : :class:`WinHPCTask`
163 The task object to add.
164 """
165 self.tasks.append(task)
166
167
168 class WinHPCTask(Configurable):
169
170 task_id = Str('')
171 task_name = Str('')
172 version = Str("2.000")
173 min_cores = Int(1, config=True)
174 max_cores = Int(1, config=True)
175 min_sockets = Int(1, config=True)
176 max_sockets = Int(1, config=True)
177 min_nodes = Int(1, config=True)
178 max_nodes = Int(1, config=True)
179 unit_type = Str("Core", config=True)
180 command_line = CStr('', config=True)
181 work_directory = CStr('', config=True)
182 is_rerunnable = Bool(True, config=True)
183 std_out_file_path = CStr('', config=True)
184 std_err_file_path = CStr('', config=True)
185 is_parametric = Bool(False, config=True)
186 environment_variables = Instance(dict, args=(), config=True)
187
188 def _write_attr(self, root, attr, key):
189 s = as_str(getattr(self, attr, ''))
190 if s:
191 root.set(key, s)
192
193 def as_element(self):
194 root = ET.Element('Task')
195 self._write_attr(root, 'version', '_A_Version')
196 self._write_attr(root, 'task_name', '_B_Name')
197 self._write_attr(root, 'min_cores', '_C_MinCores')
198 self._write_attr(root, 'max_cores', '_D_MaxCores')
199 self._write_attr(root, 'min_sockets', '_E_MinSockets')
200 self._write_attr(root, 'max_sockets', '_F_MaxSockets')
201 self._write_attr(root, 'min_nodes', '_G_MinNodes')
202 self._write_attr(root, 'max_nodes', '_H_MaxNodes')
203 self._write_attr(root, 'command_line', '_I_CommandLine')
204 self._write_attr(root, 'work_directory', '_J_WorkDirectory')
205 self._write_attr(root, 'is_rerunnable', '_K_IsRerunnable')
206 self._write_attr(root, 'std_out_file_path', '_L_StdOutFilePath')
207 self._write_attr(root, 'std_err_file_path', '_M_StdErrFilePath')
208 self._write_attr(root, 'is_parametric', '_N_IsParametric')
209 self._write_attr(root, 'unit_type', '_O_UnitType')
210 root.append(self.get_env_vars())
211 return root
212
213 def get_env_vars(self):
214 env_vars = ET.Element('EnvironmentVariables')
215 for k, v in self.environment_variables.iteritems():
216 variable = ET.SubElement(env_vars, "Variable")
217 name = ET.SubElement(variable, "Name")
218 name.text = k
219 value = ET.SubElement(variable, "Value")
220 value.text = v
221 return env_vars
222
223
224
225 # By declaring these, we can configure the controller and engine separately!
226
227 class IPControllerJob(WinHPCJob):
228 job_name = Str('IPController', config=False)
229 is_exclusive = Bool(False, config=True)
230 username = Str(find_username(), config=True)
231 priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
232 default_value='Highest', config=True)
233 requested_nodes = Str('', config=True)
234 project = Str('IPython', config=True)
235
236
237 class IPEngineSetJob(WinHPCJob):
238 job_name = Str('IPEngineSet', config=False)
239 is_exclusive = Bool(False, config=True)
240 username = Str(find_username(), config=True)
241 priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
242 default_value='Highest', config=True)
243 requested_nodes = Str('', config=True)
244 project = Str('IPython', config=True)
245
246
247 class IPControllerTask(WinHPCTask):
248
249 task_name = Str('IPController', config=True)
250 controller_cmd = List(['ipcontroller.exe'], config=True)
251 controller_args = List(['--log-to-file', '--log-level', '40'], config=True)
252 # I don't want these to be configurable
253 std_out_file_path = CStr('', config=False)
254 std_err_file_path = CStr('', config=False)
255 min_cores = Int(1, config=False)
256 max_cores = Int(1, config=False)
257 min_sockets = Int(1, config=False)
258 max_sockets = Int(1, config=False)
259 min_nodes = Int(1, config=False)
260 max_nodes = Int(1, config=False)
261 unit_type = Str("Core", config=False)
262 work_directory = CStr('', config=False)
263
264 def __init__(self, config=None):
265 super(IPControllerTask, self).__init__(config=config)
266 the_uuid = uuid.uuid1()
267 self.std_out_file_path = os.path.join('log','ipcontroller-%s.out' % the_uuid)
268 self.std_err_file_path = os.path.join('log','ipcontroller-%s.err' % the_uuid)
269
270 @property
271 def command_line(self):
272 return ' '.join(self.controller_cmd + self.controller_args)
273
274
275 class IPEngineTask(WinHPCTask):
276
277 task_name = Str('IPEngine', config=True)
278 engine_cmd = List(['ipengine.exe'], config=True)
279 engine_args = List(['--log-to-file', '--log-level', '40'], config=True)
280 # I don't want these to be configurable
281 std_out_file_path = CStr('', config=False)
282 std_err_file_path = CStr('', config=False)
283 min_cores = Int(1, config=False)
284 max_cores = Int(1, config=False)
285 min_sockets = Int(1, config=False)
286 max_sockets = Int(1, config=False)
287 min_nodes = Int(1, config=False)
288 max_nodes = Int(1, config=False)
289 unit_type = Str("Core", config=False)
290 work_directory = CStr('', config=False)
291
292 def __init__(self, config=None):
293 super(IPEngineTask,self).__init__(config=config)
294 the_uuid = uuid.uuid1()
295 self.std_out_file_path = os.path.join('log','ipengine-%s.out' % the_uuid)
296 self.std_err_file_path = os.path.join('log','ipengine-%s.err' % the_uuid)
297
298 @property
299 def command_line(self):
300 return ' '.join(self.engine_cmd + self.engine_args)
301
302
303 # j = WinHPCJob(None)
304 # j.job_name = 'IPCluster'
305 # j.username = 'GNET\\bgranger'
306 # j.requested_nodes = 'GREEN'
307 #
308 # t = WinHPCTask(None)
309 # t.task_name = 'Controller'
310 # t.command_line = r"\\blue\domainusers$\bgranger\Python\Python25\Scripts\ipcontroller.exe --log-to-file -p default --log-level 10"
311 # t.work_directory = r"\\blue\domainusers$\bgranger\.ipython\cluster_default"
312 # t.std_out_file_path = 'controller-out.txt'
313 # t.std_err_file_path = 'controller-err.txt'
314 # t.environment_variables['PYTHONPATH'] = r"\\blue\domainusers$\bgranger\Python\Python25\Lib\site-packages"
315 # j.add_task(t)
316
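For orientation, here is a brief, hedged usage sketch of the classes defined above, modeled on the commented-out example at the end of the file. The import path, node name, and PYTHONPATH value are illustrative assumptions (the module's file name is not shown in this changeset):

    # hypothetical usage; module path and site-specific values are assumptions
    from IPython.parallel.winhpcjob import WinHPCJob, IPControllerTask

    job = WinHPCJob(None)
    job.job_name = 'IPCluster'
    job.requested_nodes = 'HEADNODE'      # assumed node group name

    ctrl = IPControllerTask(None)         # runs ipcontroller.exe with its default args
    ctrl.environment_variables['PYTHONPATH'] = r'C:\Python26\Lib\site-packages'  # illustrative
    job.add_task(ctrl)

    job.write('ipcontroller_job.xml')     # XML the HPC Server 2008 scheduler can submit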
@@ -23,8 +23,8 b' c = get_config()'
23 # - PBSControllerLauncher
23 # - PBSControllerLauncher
24 # - SGEControllerLauncher
24 # - SGEControllerLauncher
25 # - WindowsHPCControllerLauncher
25 # - WindowsHPCControllerLauncher
26 # c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.LocalControllerLauncher'
26 # c.Global.controller_launcher = 'IPython.parallel.launcher.LocalControllerLauncher'
27 c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLauncher'
27 c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher'
28
28
29 # Options are:
29 # Options are:
30 # - LocalEngineSetLauncher
30 # - LocalEngineSetLauncher
@@ -32,7 +32,7 b" c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLaunc"
32 # - PBSEngineSetLauncher
32 # - PBSEngineSetLauncher
33 # - SGEEngineSetLauncher
33 # - SGEEngineSetLauncher
34 # - WindowsHPCEngineSetLauncher
34 # - WindowsHPCEngineSetLauncher
35 # c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher'
35 # c.Global.engine_launcher = 'IPython.parallel.launcher.LocalEngineSetLauncher'
36
36
37 #-----------------------------------------------------------------------------
37 #-----------------------------------------------------------------------------
38 # Global configuration
38 # Global configuration
@@ -89,7 +89,7 b' c = get_config()'
89 # Which class to use for the db backend. Currently supported are DictDB (the
89 # Which class to use for the db backend. Currently supported are DictDB (the
90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
91 # slow-down the Hub's responsiveness, but also reduce its memory footprint.
91 # slow-down the Hub's responsiveness, but also reduce its memory footprint.
92 # c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB'
92 # c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
93
93
94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
95 # Hub pings engines for heartbeats. This determines how quickly the Hub
95 # Hub pings engines for heartbeats. This determines how quickly the Hub
@@ -144,11 +144,11 b' c = get_config()'
144
144
145 # ----- in-memory configuration --------
145 # ----- in-memory configuration --------
146 # this line restores the default behavior: in-memory storage of all results.
146 # this line restores the default behavior: in-memory storage of all results.
147 # c.HubFactory.db_class = 'IPython.zmq.parallel.dictdb.DictDB'
147 # c.HubFactory.db_class = 'IPython.parallel.dictdb.DictDB'
148
148
149 # ----- sqlite configuration --------
149 # ----- sqlite configuration --------
150 # use this line to activate sqlite:
150 # use this line to activate sqlite:
151 # c.HubFactory.db_class = 'IPython.zmq.parallel.sqlitedb.SQLiteDB'
151 # c.HubFactory.db_class = 'IPython.parallel.sqlitedb.SQLiteDB'
152
152
153 # You can specify the name of the db-file. By default, this will be located
153 # You can specify the name of the db-file. By default, this will be located
154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
@@ -165,7 +165,7 b' c = get_config()'
165
165
166 # ----- mongodb configuration --------
166 # ----- mongodb configuration --------
167 # use this line to activate mongodb:
167 # use this line to activate mongodb:
168 # c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB'
168 # c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
169
169
170 # You can specify the args and kwargs pymongo will use when creating the Connection.
170 # You can specify the args and kwargs pymongo will use when creating the Connection.
171 # For more information on what these options might be, see pymongo documentation.
171 # For more information on what these options might be, see pymongo documentation.
@@ -34,7 +34,7 b' try:'
34 except ImportError:
34 except ImportError:
35 pexpect = None
35 pexpect = None
36
36
37 from IPython.zmq.parallel.entry_point import select_random_ports
37 from IPython.parallel.entry_point import select_random_ports
38
38
39 #-----------------------------------------------------------------------------
39 #-----------------------------------------------------------------------------
40 # Code
40 # Code
@@ -13,12 +13,13 b''
13 import zmq
13 import zmq
14
14
15 if zmq.__version__ < '2.1.3':
15 if zmq.__version__ < '2.1.3':
16 raise ImportError("IPython.zmq.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__)
16 raise ImportError("IPython.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__)
17
17
18 from .asyncresult import *
18 from .asyncresult import *
19 from .client import Client
19 from .client import Client
20 from .dependency import *
20 from .dependency import *
21 from .remotefunction import *
21 from .remotefunction import *
22 from .view import *
22 from .view import *
23 from IPython.utils.pickleutil import Reference
23
24
24
25
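After the rename, user-facing imports come from the new top-level package. A minimal sketch, assuming a cluster is already running (started with ipclusterz, as elsewhere in these docs):

    from IPython.parallel import Client, Reference

    rc = Client()                    # connect using the default profile
    dv = rc[:]                       # DirectView on all engines
    dv['a'] = 123
    print(dv.apply_sync(lambda x: x, Reference('a')))   # one 123 per engine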
NO CONTENT: file renamed from IPython/zmq/parallel/asyncresult.py to IPython/parallel/asyncresult.py
@@ -24,7 +24,6 b' import zmq'
24 # from zmq.eventloop import ioloop, zmqstream
24 # from zmq.eventloop import ioloop, zmqstream
25
25
26 from IPython.utils.path import get_ipython_dir
26 from IPython.utils.path import get_ipython_dir
27 from IPython.utils.pickleutil import Reference
28 from IPython.utils.traitlets import (HasTraits, Int, Instance, CUnicode,
27 from IPython.utils.traitlets import (HasTraits, Int, Instance, CUnicode,
29 Dict, List, Bool, Str, Set)
28 Dict, List, Bool, Str, Set)
30 from IPython.external.decorator import decorator
29 from IPython.external.decorator import decorator
@@ -33,10 +32,8 b' from IPython.external.ssh import tunnel'
33 from . import error
32 from . import error
34 from . import util
33 from . import util
35 from . import streamsession as ss
34 from . import streamsession as ss
36 from .asyncresult import AsyncResult, AsyncMapResult, AsyncHubResult
35 from .asyncresult import AsyncResult, AsyncHubResult
37 from .clusterdir import ClusterDir, ClusterDirError
36 from .clusterdir import ClusterDir, ClusterDirError
38 from .dependency import Dependency, depend, require, dependent
39 from .remotefunction import remote, parallel, ParallelFunction, RemoteFunction
40 from .view import DirectView, LoadBalancedView
37 from .view import DirectView, LoadBalancedView
41
38
42 #--------------------------------------------------------------------------
39 #--------------------------------------------------------------------------
@@ -985,7 +982,7 b' class Client(HasTraits):'
985 targets: list,slice,int,etc. [default: use all engines]
982 targets: list,slice,int,etc. [default: use all engines]
986 The subset of engines across which to load-balance
983 The subset of engines across which to load-balance
987 """
984 """
988 if targets is None:
985 if targets is not None:
989 targets = self._build_targets(targets)[1]
986 targets = self._build_targets(targets)[1]
990 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
987 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
991
988
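This is a behavioural fix: explicit targets are now resolved to engine ids, while None continues to mean the scheduler may pick any engine. The new test_lbview_targets test added further down exercises it; a short sketch of the resulting behaviour:

    from IPython.parallel import Client

    rc = Client()
    v = rc.load_balanced_view()        # targets=None: any engine may be used
    v = rc.load_balanced_view(-1)      # restrict tasks to the last registered engine
    v = rc.load_balanced_view('all')   # explicit list of all current engine ids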
@@ -1278,16 +1275,4 b' class Client(HasTraits):'
1278 raise self._unwrap_exception(content)
1275 raise self._unwrap_exception(content)
1279
1276
1280
1277
1281 __all__ = [ 'Client',
1278 __all__ = [ 'Client' ]
1282 'depend',
1283 'require',
1284 'remote',
1285 'parallel',
1286 'RemoteFunction',
1287 'ParallelFunction',
1288 'DirectView',
1289 'LoadBalancedView',
1290 'AsyncResult',
1291 'AsyncMapResult',
1292 'Reference'
1293 ]
NO CONTENT: file renamed from IPython/zmq/parallel/clusterdir.py to IPython/parallel/clusterdir.py
NO CONTENT: file renamed from IPython/zmq/parallel/controller.py to IPython/parallel/controller.py
@@ -67,7 +67,7 b' class dependent(object):'
67 @interactive
67 @interactive
68 def _require(*names):
68 def _require(*names):
69 """Helper for @require decorator."""
69 """Helper for @require decorator."""
70 from IPython.zmq.parallel.error import UnmetDependency
70 from IPython.parallel.error import UnmetDependency
71 user_ns = globals()
71 user_ns = globals()
72 for name in names:
72 for name in names:
73 if name in user_ns:
73 if name in user_ns:
NO CONTENT: file renamed from IPython/zmq/parallel/dictdb.py to IPython/parallel/dictdb.py
NO CONTENT: file renamed from IPython/zmq/parallel/engine.py to IPython/parallel/engine.py
NO CONTENT: file renamed from IPython/zmq/parallel/entry_point.py to IPython/parallel/entry_point.py
NO CONTENT: file renamed from IPython/zmq/parallel/error.py to IPython/parallel/error.py
@@ -22,8 +22,8 b' from IPython.config.configurable import Configurable'
22 from IPython.utils.importstring import import_item
22 from IPython.utils.importstring import import_item
23 from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr
23 from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr
24
24
25 import IPython.zmq.parallel.streamsession as ss
25 import IPython.parallel.streamsession as ss
26 from IPython.zmq.parallel.entry_point import select_random_ports
26 from IPython.parallel.entry_point import select_random_ports
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Classes
29 # Classes
@@ -37,7 +37,7 b' class LoggingFactory(Configurable):'
37
37
38
38
39 class SessionFactory(LoggingFactory):
39 class SessionFactory(LoggingFactory):
40 """The Base factory from which every factory in IPython.zmq.parallel inherits"""
40 """The Base factory from which every factory in IPython.parallel inherits"""
41
41
42 packer = Str('',config=True)
42 packer = Str('',config=True)
43 unpacker = Str('',config=True)
43 unpacker = Str('',config=True)
@@ -48,7 +48,7 b' class SessionFactory(LoggingFactory):'
48 exec_key = CUnicode('',config=True)
48 exec_key = CUnicode('',config=True)
49 # not configurable:
49 # not configurable:
50 context = Instance('zmq.Context', (), {})
50 context = Instance('zmq.Context', (), {})
51 session = Instance('IPython.zmq.parallel.streamsession.StreamSession')
51 session = Instance('IPython.parallel.streamsession.StreamSession')
52 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
52 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
53 def _loop_default(self):
53 def _loop_default(self):
54 return IOLoop.instance()
54 return IOLoop.instance()
NO CONTENT: file renamed from IPython/zmq/parallel/heartmonitor.py to IPython/parallel/heartmonitor.py
@@ -136,11 +136,11 b' class HubFactory(RegistrationFactory):'
136
136
137 monitor_url = CStr('')
137 monitor_url = CStr('')
138
138
139 db_class = CStr('IPython.zmq.parallel.dictdb.DictDB', config=True)
139 db_class = CStr('IPython.parallel.dictdb.DictDB', config=True)
140
140
141 # not configurable
141 # not configurable
142 db = Instance('IPython.zmq.parallel.dictdb.BaseDB')
142 db = Instance('IPython.parallel.dictdb.BaseDB')
143 heartmonitor = Instance('IPython.zmq.parallel.heartmonitor.HeartMonitor')
143 heartmonitor = Instance('IPython.parallel.heartmonitor.HeartMonitor')
144 subconstructors = List()
144 subconstructors = List()
145 _constructed = Bool(False)
145 _constructed = Bool(False)
146
146
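The db_class string above is what the cluster config files earlier in this changeset set; a minimal controller-config sketch using the new package paths (the config file name depends on your cluster profile):

    c = get_config()
    # in-memory default:
    c.HubFactory.db_class = 'IPython.parallel.dictdb.DictDB'
    # or persist task results instead:
    # c.HubFactory.db_class = 'IPython.parallel.sqlitedb.SQLiteDB'
    # c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'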
@@ -56,7 +56,7 b' def strip_args(flags, args=sys.argv[1:]):'
56
56
57 def launch_process(mod, args):
57 def launch_process(mod, args):
58 """Launch a controller or engine in a subprocess."""
58 """Launch a controller or engine in a subprocess."""
59 code = "from IPython.zmq.parallel.%s import launch_new_instance;launch_new_instance()"%mod
59 code = "from IPython.parallel.%s import launch_new_instance;launch_new_instance()"%mod
60 arguments = [ sys.executable, '-c', code ] + args
60 arguments = [ sys.executable, '-c', code ] + args
61 blackholew = file(os.devnull, 'w')
61 blackholew = file(os.devnull, 'w')
62 blackholer = file(os.devnull, 'r')
62 blackholer = file(os.devnull, 'r')
@@ -26,7 +26,7 b' from zmq.eventloop import ioloop'
26
26
27 from IPython.external.argparse import ArgumentParser, SUPPRESS
27 from IPython.external.argparse import ArgumentParser, SUPPRESS
28 from IPython.utils.importstring import import_item
28 from IPython.utils.importstring import import_item
29 from IPython.zmq.parallel.clusterdir import (
29 from IPython.parallel.clusterdir import (
30 ApplicationWithClusterDir, ClusterDirConfigLoader,
30 ApplicationWithClusterDir, ClusterDirConfigLoader,
31 ClusterDirError, PIDFileError
31 ClusterDirError, PIDFileError
32 )
32 )
@@ -260,9 +260,9 b' class IPClusterApp(ApplicationWithClusterDir):'
260 def create_default_config(self):
260 def create_default_config(self):
261 super(IPClusterApp, self).create_default_config()
261 super(IPClusterApp, self).create_default_config()
262 self.default_config.Global.controller_launcher = \
262 self.default_config.Global.controller_launcher = \
263 'IPython.zmq.parallel.launcher.LocalControllerLauncher'
263 'IPython.parallel.launcher.LocalControllerLauncher'
264 self.default_config.Global.engine_launcher = \
264 self.default_config.Global.engine_launcher = \
265 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher'
265 'IPython.parallel.launcher.LocalEngineSetLauncher'
266 self.default_config.Global.n = 2
266 self.default_config.Global.n = 2
267 self.default_config.Global.delay = 2
267 self.default_config.Global.delay = 2
268 self.default_config.Global.reset_config = False
268 self.default_config.Global.reset_config = False
@@ -30,13 +30,13 b' from zmq.log.handlers import PUBHandler'
30 from zmq.utils import jsonapi as json
30 from zmq.utils import jsonapi as json
31
31
32 from IPython.config.loader import Config
32 from IPython.config.loader import Config
33 from IPython.zmq.parallel import factory
33 from IPython.parallel import factory
34 from IPython.zmq.parallel.controller import ControllerFactory
34 from IPython.parallel.controller import ControllerFactory
35 from IPython.zmq.parallel.clusterdir import (
35 from IPython.parallel.clusterdir import (
36 ApplicationWithClusterDir,
36 ApplicationWithClusterDir,
37 ClusterDirConfigLoader
37 ClusterDirConfigLoader
38 )
38 )
39 from IPython.zmq.parallel.util import disambiguate_ip_address, split_url
39 from IPython.parallel.util import disambiguate_ip_address, split_url
40 # from IPython.kernel.fcutil import FCServiceFactory, FURLError
40 # from IPython.kernel.fcutil import FCServiceFactory, FURLError
41 from IPython.utils.traitlets import Instance, Unicode
41 from IPython.utils.traitlets import Instance, Unicode
42
42
@@ -117,11 +117,11 b' class IPControllerAppConfigLoader(ClusterDirConfigLoader):'
117 ## Hub Config:
117 ## Hub Config:
118 paa('--mongodb',
118 paa('--mongodb',
119 dest='HubFactory.db_class', action='store_const',
119 dest='HubFactory.db_class', action='store_const',
120 const='IPython.zmq.parallel.mongodb.MongoDB',
120 const='IPython.parallel.mongodb.MongoDB',
121 help='Use MongoDB for task storage [default: in-memory]')
121 help='Use MongoDB for task storage [default: in-memory]')
122 paa('--sqlite',
122 paa('--sqlite',
123 dest='HubFactory.db_class', action='store_const',
123 dest='HubFactory.db_class', action='store_const',
124 const='IPython.zmq.parallel.sqlitedb.SQLiteDB',
124 const='IPython.parallel.sqlitedb.SQLiteDB',
125 help='Use SQLite3 for DB task storage [default: in-memory]')
125 help='Use SQLite3 for DB task storage [default: in-memory]')
126 paa('--hb',
126 paa('--hb',
127 type=int, dest='HubFactory.hb', nargs=2,
127 type=int, dest='HubFactory.hb', nargs=2,
@@ -22,16 +22,16 b' import sys'
22 import zmq
22 import zmq
23 from zmq.eventloop import ioloop
23 from zmq.eventloop import ioloop
24
24
25 from IPython.zmq.parallel.clusterdir import (
25 from IPython.parallel.clusterdir import (
26 ApplicationWithClusterDir,
26 ApplicationWithClusterDir,
27 ClusterDirConfigLoader
27 ClusterDirConfigLoader
28 )
28 )
29 from IPython.zmq.log import EnginePUBHandler
29 from IPython.zmq.log import EnginePUBHandler
30
30
31 from IPython.zmq.parallel import factory
31 from IPython.parallel import factory
32 from IPython.zmq.parallel.engine import EngineFactory
32 from IPython.parallel.engine import EngineFactory
33 from IPython.zmq.parallel.streamkernel import Kernel
33 from IPython.parallel.streamkernel import Kernel
34 from IPython.zmq.parallel.util import disambiguate_url
34 from IPython.parallel.util import disambiguate_url
35 from IPython.utils.importstring import import_item
35 from IPython.utils.importstring import import_item
36
36
37
37
@@ -20,7 +20,7 b' import sys'
20
20
21 import zmq
21 import zmq
22
22
23 from IPython.zmq.parallel.clusterdir import (
23 from IPython.parallel.clusterdir import (
24 ApplicationWithClusterDir,
24 ApplicationWithClusterDir,
25 ClusterDirConfigLoader
25 ClusterDirConfigLoader
26 )
26 )
NO CONTENT: file renamed from IPython/zmq/parallel/kernelstarter.py to IPython/parallel/kernelstarter.py
@@ -64,15 +64,15 b' except ImportError:'
64
64
65
65
66 ipclusterz_cmd_argv = pycmd2argv(get_ipython_module_path(
66 ipclusterz_cmd_argv = pycmd2argv(get_ipython_module_path(
67 'IPython.zmq.parallel.ipclusterapp'
67 'IPython.parallel.ipclusterapp'
68 ))
68 ))
69
69
70 ipenginez_cmd_argv = pycmd2argv(get_ipython_module_path(
70 ipenginez_cmd_argv = pycmd2argv(get_ipython_module_path(
71 'IPython.zmq.parallel.ipengineapp'
71 'IPython.parallel.ipengineapp'
72 ))
72 ))
73
73
74 ipcontrollerz_cmd_argv = pycmd2argv(get_ipython_module_path(
74 ipcontrollerz_cmd_argv = pycmd2argv(get_ipython_module_path(
75 'IPython.zmq.parallel.ipcontrollerapp'
75 'IPython.parallel.ipcontrollerapp'
76 ))
76 ))
77
77
78 #-----------------------------------------------------------------------------
78 #-----------------------------------------------------------------------------
NO CONTENT: file renamed from IPython/zmq/parallel/logwatcher.py to IPython/parallel/logwatcher.py
NO CONTENT: file renamed from IPython/zmq/parallel/map.py to IPython/parallel/map.py
NO CONTENT: file renamed from IPython/zmq/parallel/mongodb.py to IPython/parallel/mongodb.py
NO CONTENT: file renamed from IPython/zmq/parallel/remotefunction.py to IPython/parallel/remotefunction.py
NO CONTENT: file renamed from IPython/zmq/parallel/remotenamespace.py to IPython/parallel/remotenamespace.py
NO CONTENT: file renamed from IPython/zmq/parallel/scheduler.py to IPython/parallel/scheduler.py
NO CONTENT: file renamed from IPython/zmq/parallel/scripts/__init__.py to IPython/parallel/scripts/__init__.py
@@ -13,6 +13,6 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.zmq.parallel.ipclusterapp import launch_new_instance
16 from IPython.parallel.ipclusterapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
@@ -13,6 +13,6 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.zmq.parallel.ipcontrollerapp import launch_new_instance
16 from IPython.parallel.ipcontrollerapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
@@ -13,7 +13,7 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.zmq.parallel.ipengineapp import launch_new_instance
16 from IPython.parallel.ipengineapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
19
19
@@ -13,7 +13,7 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.zmq.parallel.iploggerapp import launch_new_instance
16 from IPython.parallel.iploggerapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
19
19
@@ -133,7 +133,8 b' class SQLiteDB(BaseDB):'
133 sqlite3.register_converter('bufs', _convert_bufs)
133 sqlite3.register_converter('bufs', _convert_bufs)
134 # connect to the db
134 # connect to the db
135 dbfile = os.path.join(self.location, self.filename)
135 dbfile = os.path.join(self.location, self.filename)
136 self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES)
136 self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES, cached_statements=16)
137 # print dir(self._db)
137
138
138 self._db.execute("""CREATE TABLE IF NOT EXISTS %s
139 self._db.execute("""CREATE TABLE IF NOT EXISTS %s
139 (msg_id text PRIMARY KEY,
140 (msg_id text PRIMARY KEY,
@@ -71,7 +71,7 b' class Kernel(SessionFactory):'
71 control_stream = Instance(zmqstream.ZMQStream)
71 control_stream = Instance(zmqstream.ZMQStream)
72 task_stream = Instance(zmqstream.ZMQStream)
72 task_stream = Instance(zmqstream.ZMQStream)
73 iopub_stream = Instance(zmqstream.ZMQStream)
73 iopub_stream = Instance(zmqstream.ZMQStream)
74 client = Instance('IPython.zmq.parallel.client.Client')
74 client = Instance('IPython.parallel.client.Client')
75
75
76 # internals
76 # internals
77 shell_streams = List()
77 shell_streams = List()
NO CONTENT: file renamed from IPython/zmq/parallel/streamsession.py to IPython/parallel/streamsession.py
@@ -18,7 +18,7 b' except:'
18 import zmq
18 import zmq
19 from zmq.core.poll import _poll as poll
19 from zmq.core.poll import _poll as poll
20 from zmq.devices import ThreadDevice
20 from zmq.devices import ThreadDevice
21 from IPython.zmq.parallel import streamsession as ss
21 from IPython.parallel import streamsession as ss
22
22
23
23
24 class QueueStream(object):
24 class QueueStream(object):
@@ -15,7 +15,7 b' import tempfile'
15 import time
15 import time
16 from subprocess import Popen, PIPE, STDOUT
16 from subprocess import Popen, PIPE, STDOUT
17
17
18 from IPython.zmq.parallel import client
18 from IPython.parallel import client
19
19
20 processes = []
20 processes = []
21 blackhole = tempfile.TemporaryFile()
21 blackhole = tempfile.TemporaryFile()
@@ -20,11 +20,11 b' from zmq.tests import BaseZMQTestCase'
20
20
21 from IPython.external.decorator import decorator
21 from IPython.external.decorator import decorator
22
22
23 from IPython.zmq.parallel import error
23 from IPython.parallel import error
24 from IPython.zmq.parallel.client import Client
24 from IPython.parallel.client import Client
25 from IPython.zmq.parallel.ipcluster import launch_process
25 from IPython.parallel.ipcluster import launch_process
26 from IPython.zmq.parallel.entry_point import select_random_ports
26 from IPython.parallel.entry_point import select_random_ports
27 from IPython.zmq.parallel.tests import processes,add_engines
27 from IPython.parallel.tests import processes,add_engines
28
28
29 # simple tasks for use in apply tests
29 # simple tasks for use in apply tests
30
30
@@ -12,9 +12,9 b''
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14
14
15 from IPython.zmq.parallel.error import TimeoutError
15 from IPython.parallel.error import TimeoutError
16
16
17 from IPython.zmq.parallel.tests import add_engines
17 from IPython.parallel.tests import add_engines
18 from .clienttest import ClusterTestCase
18 from .clienttest import ClusterTestCase
19
19
20 def setup():
20 def setup():
@@ -16,10 +16,10 b' from tempfile import mktemp'
16
16
17 import zmq
17 import zmq
18
18
19 from IPython.zmq.parallel import client as clientmod
19 from IPython.parallel import client as clientmod
20 from IPython.zmq.parallel import error
20 from IPython.parallel import error
21 from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult
21 from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult
22 from IPython.zmq.parallel.view import LoadBalancedView, DirectView
22 from IPython.parallel.view import LoadBalancedView, DirectView
23
23
24 from clienttest import ClusterTestCase, segfault, wait, add_engines
24 from clienttest import ClusterTestCase, segfault, wait, add_engines
25
25
@@ -61,6 +61,15 b' class TestClient(ClusterTestCase):'
61 self.assertEquals(v.targets, targets[-1])
61 self.assertEquals(v.targets, targets[-1])
62 self.assertRaises(TypeError, lambda : self.client[None])
62 self.assertRaises(TypeError, lambda : self.client[None])
63
63
64 def test_lbview_targets(self):
65 """test load_balanced_view targets"""
66 v = self.client.load_balanced_view()
67 self.assertEquals(v.targets, None)
68 v = self.client.load_balanced_view(-1)
69 self.assertEquals(v.targets, [self.client.ids[-1]])
70 v = self.client.load_balanced_view('all')
71 self.assertEquals(v.targets, self.client.ids)
72
64 def test_targets(self):
73 def test_targets(self):
65 """test various valid targets arguments"""
74 """test various valid targets arguments"""
66 build = self.client._build_targets
75 build = self.client._build_targets
@@ -18,10 +18,10 b' import os'
18
18
19 from IPython.utils.pickleutil import can, uncan
19 from IPython.utils.pickleutil import can, uncan
20
20
21 from IPython.zmq.parallel import dependency as dmod
21 from IPython.parallel import dependency as dmod
22 from IPython.zmq.parallel.util import interactive
22 from IPython.parallel.util import interactive
23
23
24 from IPython.zmq.parallel.tests import add_engines
24 from IPython.parallel.tests import add_engines
25 from .clienttest import ClusterTestCase
25 from .clienttest import ClusterTestCase
26
26
27 def setup():
27 def setup():
@@ -16,7 +16,7 b' from unittest import TestCase'
16 from IPython.testing.parametric import parametric
16 from IPython.testing.parametric import parametric
17 from IPython.utils import newserialized as ns
17 from IPython.utils import newserialized as ns
18 from IPython.utils.pickleutil import can, uncan, CannedObject, CannedFunction
18 from IPython.utils.pickleutil import can, uncan, CannedObject, CannedFunction
19 from IPython.zmq.parallel.tests.clienttest import skip_without
19 from IPython.parallel.tests.clienttest import skip_without
20
20
21
21
22 class CanningTestCase(TestCase):
22 class CanningTestCase(TestCase):
@@ -18,7 +18,7 b' import zmq'
18 from zmq.tests import BaseZMQTestCase
18 from zmq.tests import BaseZMQTestCase
19 from zmq.eventloop.zmqstream import ZMQStream
19 from zmq.eventloop.zmqstream import ZMQStream
20 # from IPython.zmq.tests import SessionTestCase
20 # from IPython.zmq.tests import SessionTestCase
21 from IPython.zmq.parallel import streamsession as ss
21 from IPython.parallel import streamsession as ss
22
22
23 class SessionTestCase(BaseZMQTestCase):
23 class SessionTestCase(BaseZMQTestCase):
24
24
@@ -15,13 +15,13 b' from tempfile import mktemp'
15
15
16 import zmq
16 import zmq
17
17
18 from IPython.zmq.parallel import client as clientmod
18 from IPython import parallel as pmod
19 from IPython.zmq.parallel import error
19 from IPython.parallel import error
20 from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult
20 from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult
21 from IPython.zmq.parallel.view import LoadBalancedView, DirectView
21 from IPython.parallel.view import LoadBalancedView, DirectView
22 from IPython.zmq.parallel.util import interactive
22 from IPython.parallel.util import interactive
23
23
24 from IPython.zmq.parallel.tests import add_engines
24 from IPython.parallel.tests import add_engines
25
25
26 from .clienttest import ClusterTestCase, segfault, wait, skip_without
26 from .clienttest import ClusterTestCase, segfault, wait, skip_without
27
27
@@ -129,7 +129,7 b' class TestView(ClusterTestCase):'
129
129
130 def test_get_result(self):
130 def test_get_result(self):
131 """test getting results from the Hub."""
131 """test getting results from the Hub."""
132 c = clientmod.Client(profile='iptest')
132 c = pmod.Client(profile='iptest')
133 # self.add_engines(1)
133 # self.add_engines(1)
134 t = c.ids[-1]
134 t = c.ids[-1]
135 v = c[t]
135 v = c[t]
@@ -154,7 +154,7 b' class TestView(ClusterTestCase):'
154 """)
154 """)
155 v = self.client[-1]
155 v = self.client[-1]
156 v.run(tmpfile, block=True)
156 v.run(tmpfile, block=True)
157 self.assertEquals(v.apply_sync(lambda f: f(), clientmod.Reference('g')), 5)
157 self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5)
158
158
159 def test_apply_tracked(self):
159 def test_apply_tracked(self):
160 """test tracking for apply"""
160 """test tracking for apply"""
@@ -206,7 +206,7 b' class TestView(ClusterTestCase):'
206 def test_remote_reference(self):
206 def test_remote_reference(self):
207 v = self.client[-1]
207 v = self.client[-1]
208 v['a'] = 123
208 v['a'] = 123
209 ra = clientmod.Reference('a')
209 ra = pmod.Reference('a')
210 b = v.apply_sync(lambda x: x, ra)
210 b = v.apply_sync(lambda x: x, ra)
211 self.assertEquals(b, 123)
211 self.assertEquals(b, 123)
212
212
NO CONTENT: file renamed from IPython/zmq/parallel/util.py to IPython/parallel/util.py
@@ -105,7 +105,7 b' class View(HasTraits):'
105 history=List()
105 history=List()
106 outstanding = Set()
106 outstanding = Set()
107 results = Dict()
107 results = Dict()
108 client = Instance('IPython.zmq.parallel.client.Client')
108 client = Instance('IPython.parallel.client.Client')
109
109
110 _socket = Instance('zmq.Socket')
110 _socket = Instance('zmq.Socket')
111 _flag_names = List(['targets', 'block', 'track'])
111 _flag_names = List(['targets', 'block', 'track'])
@@ -93,7 +93,7 b' class CannedFunction(CannedObject):'
93
93
94 def can(obj):
94 def can(obj):
95 # import here to prevent module-level circular imports
95 # import here to prevent module-level circular imports
96 from IPython.zmq.parallel.dependency import dependent
96 from IPython.parallel.dependency import dependent
97 if isinstance(obj, dependent):
97 if isinstance(obj, dependent):
98 keys = ('f','df')
98 keys = ('f','df')
99 return CannedObject(obj, keys=keys)
99 return CannedObject(obj, keys=keys)
@@ -8,7 +8,7 b' Authors'
8 """
8 """
9 import networkx as nx
9 import networkx as nx
10 from random import randint, random
10 from random import randint, random
11 from IPython.zmq.parallel import client as cmod
11 from IPython import parallel
12
12
13 def randomwait():
13 def randomwait():
14 import time
14 import time
@@ -87,7 +87,7 b' def main(nodes, edges):'
87 for node in G:
87 for node in G:
88 jobs[node] = randomwait
88 jobs[node] = randomwait
89
89
90 client = cmod.Client()
90 client = parallel.Client()
91 view = client.load_balanced_view()
91 view = client.load_balanced_view()
92 print "submitting %i tasks with %i dependencies"%(nodes,edges)
92 print "submitting %i tasks with %i dependencies"%(nodes,edges)
93 results = submit_jobs(view, G, jobs)
93 results = submit_jobs(view, G, jobs)
@@ -1,6 +1,4 b''
1 from IPython.zmq.parallel import error
1 from IPython.parallel import *
2 from IPython.zmq.parallel.dependency import Dependency
3 from IPython.zmq.parallel.client import *
4
2
5 client = Client()
3 client = Client()
6
4
@@ -1,4 +1,4 b''
1 from IPython.zmq.parallel.client import *
1 from IPython.parallel import *
2
2
3 client = Client()
3 client = Client()
4 view = client[:]
4 view = client[:]
@@ -1,6 +1,6 b''
1 import time
1 import time
2 import numpy as np
2 import numpy as np
3 from IPython.zmq.parallel import client as clientmod
3 from IPython import parallel
4
4
5 nlist = map(int, np.logspace(2,9,16,base=2))
5 nlist = map(int, np.logspace(2,9,16,base=2))
6 nlist2 = map(int, np.logspace(2,8,15,base=2))
6 nlist2 = map(int, np.logspace(2,8,15,base=2))
@@ -14,7 +14,7 b" def echo(s=''):"
14 return s
14 return s
15
15
16 def time_throughput(nmessages, t=0, f=wait):
16 def time_throughput(nmessages, t=0, f=wait):
17 client = clientmod.Client()
17 client = parallel.Client()
18 view = client[None]
18 view = client[None]
19 # do one ping before starting timing
19 # do one ping before starting timing
20 if f is echo:
20 if f is echo:
@@ -1,4 +1,4 b''
1 from IPython.zmq.parallel.client import *
1 from IPython.parallel import *
2
2
3 client = Client()
3 client = Client()
4
4
@@ -11,7 +11,7 b' and some engines using something like::'
11 ipclusterz start -n 4
11 ipclusterz start -n 4
12 """
12 """
13 import sys
13 import sys
14 from IPython.zmq.parallel import client, error
14 from IPython.parallel import Client, error
15 import time
15 import time
16 import BeautifulSoup # this isn't necessary, but it helps throw the dependency error earlier
16 import BeautifulSoup # this isn't necessary, but it helps throw the dependency error earlier
17
17
@@ -39,7 +39,7 b' class DistributedSpider(object):'
39 pollingDelay = 0.5
39 pollingDelay = 0.5
40
40
41 def __init__(self, site):
41 def __init__(self, site):
42 self.client = client.Client()
42 self.client = Client()
43 self.view = self.client.load_balanced_view()
43 self.view = self.client.load_balanced_view()
44 self.mux = self.client[:]
44 self.mux = self.client[:]
45
45
@@ -2,9 +2,9 b''
2 A Distributed Hello world
2 A Distributed Hello world
3 Ken Kinder <ken@kenkinder.com>
3 Ken Kinder <ken@kenkinder.com>
4 """
4 """
5 from IPython.zmq.parallel import client
5 from IPython.parallel import Client
6
6
7 rc = client.Client()
7 rc = Client()
8
8
9 def sleep_and_echo(t, msg):
9 def sleep_and_echo(t, msg):
10 import time
10 import time
@@ -3,7 +3,7 b' import socket'
3 import uuid
3 import uuid
4 import zmq
4 import zmq
5
5
6 from IPython.zmq.parallel.util import disambiguate_url
6 from IPython.parallel.util import disambiguate_url
7
7
8 class EngineCommunicator(object):
8 class EngineCommunicator(object):
9
9
@@ -1,9 +1,9 b''
1 import sys
1 import sys
2
2
3 from IPython.zmq.parallel import client
3 from IPython.parallel import Client
4
4
5
5
6 rc = client.Client()
6 rc = Client()
7 rc.block=True
7 rc.block=True
8 view = rc[:]
8 view = rc[:]
9 view.run('communicator.py')
9 view.run('communicator.py')
@@ -7,7 +7,7 b''
7
7
8 import sys
8 import sys
9 import time
9 import time
10 from IPython.zmq.parallel import client
10 from IPython.parallel import Client
11 import numpy as np
11 import numpy as np
12 from mcpricer import price_options
12 from mcpricer import price_options
13 from matplotlib import pyplot as plt
13 from matplotlib import pyplot as plt
@@ -45,7 +45,7 b' sigma_vals = np.linspace(min_sigma, max_sigma, n_sigmas)'
45
45
46 # The Client is used to setup the calculation and works with all
46 # The Client is used to setup the calculation and works with all
47 # engines.
47 # engines.
48 c = client.Client(profile=cluster_profile)
48 c = Client(profile=cluster_profile)
49
49
50 # A LoadBalancedView is an interface to the engines that provides dynamic load
50 # A LoadBalancedView is an interface to the engines that provides dynamic load
51 # balancing at the expense of not knowing which engine will execute the code.
51 # balancing at the expense of not knowing which engine will execute the code.
@@ -16,7 +16,7 b' and the files used will be downloaded if they are not in the working directory'
16 of the IPython engines.
16 of the IPython engines.
17 """
17 """
18
18
19 from IPython.zmq.parallel import client
19 from IPython.parallel import Client
20 from matplotlib import pyplot as plt
20 from matplotlib import pyplot as plt
21 import numpy as np
21 import numpy as np
22 from pidigits import *
22 from pidigits import *
@@ -27,7 +27,7 b" filestring = 'pi200m.ascii.%(i)02dof20'"
27 files = [filestring % {'i':i} for i in range(1,16)]
27 files = [filestring % {'i':i} for i in range(1,16)]
28
28
29 # Connect to the IPython cluster
29 # Connect to the IPython cluster
30 c = client.Client()
30 c = Client()
31 c[:].run('pidigits.py')
31 c[:].run('pidigits.py')
32
32
33 # the number of engines
33 # the number of engines
@@ -5,7 +5,7 b' import socket'
5
5
6 import zmq
6 import zmq
7
7
8 from IPython.zmq.parallel.util import disambiguate_url
8 from IPython.parallel.util import disambiguate_url
9
9
10 class EngineCommunicator(object):
10 class EngineCommunicator(object):
11 """An object that connects Engines to each other.
11 """An object that connects Engines to each other.
@@ -28,7 +28,7 b' import time'
28 from numpy import exp, zeros, newaxis, sqrt
28 from numpy import exp, zeros, newaxis, sqrt
29
29
30 from IPython.external import argparse
30 from IPython.external import argparse
31 from IPython.zmq.parallel.client import Client, Reference
31 from IPython.parallel.client import Client, Reference
32
32
33 def setup_partitioner(index, num_procs, gnum_cells, parts):
33 def setup_partitioner(index, num_procs, gnum_cells, parts):
34 """create a partitioner in the engine namespace"""
34 """create a partitioner in the engine namespace"""
@@ -28,7 +28,7 b' import time'
28 from numpy import exp, zeros, newaxis, sqrt
28 from numpy import exp, zeros, newaxis, sqrt
29
29
30 from IPython.external import argparse
30 from IPython.external import argparse
31 from IPython.zmq.parallel.client import Client, Reference
31 from IPython.parallel.client import Client, Reference
32
32
33 def setup_partitioner(comm, addrs, index, num_procs, gnum_cells, parts):
33 def setup_partitioner(comm, addrs, index, num_procs, gnum_cells, parts):
34 """create a partitioner in the engine namespace"""
34 """create a partitioner in the engine namespace"""
@@ -1,3 +1,3 b''
1 from IPython.zmq.parallel.client import Client
1 from IPython.parallel.client import Client
2
2
3 client = Client()
3 client = Client()
@@ -10,8 +10,8 b' import sys'
10
10
11 argv = sys.argv
11 argv = sys.argv
12
12
13 from IPython.zmq.parallel.engine import EngineFactory
13 from IPython.parallel.engine import EngineFactory
14 from IPython.zmq.parallel.ipengineapp import launch_new_instance
14 from IPython.parallel.ipengineapp import launch_new_instance
15
15
16 ns = {}
16 ns = {}
17
17
@@ -32,7 +32,7 b' def cleanup(controller, engines):'
32 if __name__ == '__main__':
32 if __name__ == '__main__':
33
33
34 # Start controller in separate process
34 # Start controller in separate process
35 cont = Popen(['python', '-m', 'IPython.zmq.parallel.ipcontrollerapp'])
35 cont = Popen(['python', '-m', 'IPython.parallel.ipcontrollerapp'])
36 print('Started controller')
36 print('Started controller')
37
37
38 # "Submit jobs"
38 # "Submit jobs"
@@ -111,7 +111,7 b' on which it depends:'
111
111
112 .. sourcecode:: ipython
112 .. sourcecode:: ipython
113
113
114 In [5]: rc = client.Client()
114 In [5]: rc = Client()
115 In [5]: view = rc.load_balanced_view()
115 In [5]: view = rc.load_balanced_view()
116
116
117 In [6]: results = {}
117 In [6]: results = {}
@@ -129,12 +129,12 b' calculation can also be run by simply typing the commands from'
129
129
130 .. sourcecode:: ipython
130 .. sourcecode:: ipython
131
131
132 In [1]: from IPython.zmq.parallel import client
132 In [1]: from IPython.parallel import Client
133
133
134 # The Client allows us to use the engines interactively.
134 # The Client allows us to use the engines interactively.
135 # We simply pass Client the name of the cluster profile we
135 # We simply pass Client the name of the cluster profile we
136 # are using.
136 # are using.
137 In [2]: c = client.Client(profile='mycluster')
137 In [2]: c = Client(profile='mycluster')
138 In [3]: view = c.load_balanced_view()
138 In [3]: view = c.load_balanced_view()
139
139
140 In [3]: c.ids
140 In [3]: c.ids
@@ -43,7 +43,7 b' The following will fail:'
43 ...
43 ...
44 RemoteError: RuntimeError(array is not writeable)
44 RemoteError: RuntimeError(array is not writeable)
45 Traceback (most recent call last):
45 Traceback (most recent call last):
46 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 329, in apply_request
46 File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 329, in apply_request
47 exec code in working, working
47 exec code in working, working
48 File "<string>", line 1, in <module>
48 File "<string>", line 1, in <module>
49 File "<ipython-input-14-736187483856>", line 2, in setter
49 File "<ipython-input-14-736187483856>", line 2, in setter
@@ -178,7 +178,10 b' args : tuple/list'
178 The positional arguments passed to `f`
178 The positional arguments passed to `f`
179 kwargs : dict
179 kwargs : dict
180 The keyword arguments passed to `f`
180 The keyword arguments passed to `f`
181 block : bool (default: self.block)
181
182 flags for all views:
183
184 block : bool (default: view.block)
182 Whether to wait for the result, or return immediately.
185 Whether to wait for the result, or return immediately.
183 False:
186 False:
184 returns AsyncResult
187 returns AsyncResult
@@ -186,29 +189,23 b' block : bool (default: self.block)'
186 returns actual result(s) of f(*args, **kwargs)
189 returns actual result(s) of f(*args, **kwargs)
187 if multiple targets:
190 if multiple targets:
188 list of results, matching `targets`
191 list of results, matching `targets`
189 track : bool
192 track : bool [default view.track]
190 whether to track non-copying sends.
193 whether to track non-copying sends.
191 [default False]
192
194
193 targets : int,list of ints, 'all', None
195 targets : int,list of ints, 'all', None [default view.targets]
194 Specify the destination of the job.
196 Specify the destination of the job.
195 if None:
197 if 'all' or None:
196 Submit via Task queue for load-balancing.
197 if 'all':
198 Run on all active engines
198 Run on all active engines
199 if list:
199 if list:
200 Run on each specified engine
200 Run on each specified engine
201 if int:
201 if int:
202 Run on single engine
202 Run on single engine
203 Not eht
204
203
205 balanced : bool, default None
204 Note that LoadBalancedView uses targets to restrict possible destinations. LoadBalanced calls
206 whether to load-balance. This will default to True
205 will always execute in just one location.
207 if targets is unspecified, or False if targets is specified.
206
208
207 flags only in LoadBalancedViews:
209 If `balanced` and `targets` are both specified, the task will
208
210 be assigne to *one* of the targets by the scheduler.
211
212 after : Dependency or collection of msg_ids
209 after : Dependency or collection of msg_ids
213 Only for load-balanced execution (targets=None)
210 Only for load-balanced execution (targets=None)
214 Specify a list of msg_ids as a time-based dependency.
211 Specify a list of msg_ids as a time-based dependency.
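A compact sketch of how these flags look from user code (cluster assumed running; the lambda and its argument are placeholders):

    from IPython.parallel import Client

    rc = Client()
    dv = rc[:]                                   # DirectView: targets fixed at construction
    ar = dv.apply(lambda x: x * 2, 21)           # block/track default to the view's flags
    print(ar.get())                              # wait on the AsyncResult explicitly

    lbv = rc.load_balanced_view()                # LoadBalancedView: each task runs on one engine
    print(lbv.apply_sync(lambda x: x * 2, 21))   # blocking form returns the result directly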
@@ -243,25 +240,9 b' does something very similar to ``execute(open(f).read())``.'
243 Views
240 Views
244 =====
241 =====
245
242
246 The principal extension of the :class:`~parallel.client.Client` is the
243 The principal extension of the :class:`~parallel.Client` is the
247 :class:`~parallel.view.View` class. The client
244 :class:`~parallel.view.View` class. The client
248
245
249 Two of apply's keyword arguments are set at the construction of the View, and are immutable for
250 a given View: `balanced` and `targets`. `balanced` determines whether the View will be a
251 :class:`.LoadBalancedView` or a :class:`.DirectView`, and `targets` will be the View's `targets`
252 attribute. Attempts to change this will raise errors.
253
254 Views are cached by targets/class, so requesting a view multiple times will always return the
255 *same object*, not create a new one:
256
257 .. sourcecode:: ipython
258
259 In [3]: v1 = rc.load_balanced_view([1,2,3])
260 In [4]: v2 = rc.load_balanced_view([1,2,3])
261
262 In [5]: v2 is v1
263 Out[5]: True
264
265
246
266 DirectView
247 DirectView
267 ----------
248 ----------
@@ -312,14 +293,12 b' are always the same as:'
312 Out[3]: <DirectView [0,2]>
293 Out[3]: <DirectView [0,2]>
313
294
314 Also note that the slice is evaluated at the time of construction of the DirectView, so the
295 Also note that the slice is evaluated at the time of construction of the DirectView, so the
315 targets will not change over time if engines are added/removed from the cluster. Requesting
296 targets will not change over time if engines are added/removed from the cluster.
316 two views with the same slice at different times will *not* necessarily return the same View
317 if the number of engines has changed.
318
297
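To illustrate the point about slices being resolved at construction time, here is a short sketch, assuming four engines with ids 0-3 as in the surrounding examples:

.. sourcecode:: python

    # Sketch: DirectViews built from client slices; the engine list is fixed
    # when the view is created and does not track later cluster changes.
    from IPython.parallel import Client

    rc = Client()
    even = rc[::2]        # e.g. <DirectView [0, 2]> with four engines
    first = rc[0:2]       # <DirectView [0, 1]>
    print(even.targets)   # the targets captured at construction time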
319 Execution via DirectView
298 Execution via DirectView
320 ************************
299 ************************
321
300
322 The DirectView is the simplest way to work with one or more engines directly (hence the name).
301 The DirectView is the simplest way to work with one or more engines directly (hence the name).
323
302
324
303
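A minimal sketch of running a code string on every engine through a DirectView, mirroring the later ``dview.execute(...)`` examples; the numpy import is only a placeholder statement:

.. sourcecode:: python

    # Sketch: execute a statement on all engines; block=True waits for
    # completion, as in the error-handling examples later in this document.
    from IPython.parallel import Client

    rc = Client()
    dview = rc[:]
    dview.execute('import numpy', block=True)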
325 Data movement via DirectView
304 Data movement via DirectView
@@ -359,6 +338,15 b' between engines, MPI should be used:'
359 In [60]: dview.gather('a')
338 In [60]: dview.gather('a')
360 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
339 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
361
340
341 Push and pull
342 -------------
343
344 push
345
346 pull
347
348
349
362
350
363
351
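The push/pull and scatter/gather calls stubbed out above can be sketched as follows. The dictionary-style push and the name-based pull/scatter/gather follow the usage in the gather example, but exact signatures in this in-progress API may differ:

.. sourcecode:: python

    # Sketch of DirectView data movement: push/pull move whole objects,
    # scatter/gather partition and reassemble sequences across engines.
    from IPython.parallel import Client

    rc = Client()
    dview = rc[:]
    dview.block = True

    dview.push(dict(a=1.5, msg='hello'))   # send variables to every engine
    print(dview.pull('a'))                 # one value per engine

    dview.scatter('x', range(16))          # split the list across engines
    print(dview.gather('x'))               # [0, 1, ..., 15] reassembled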
364 LoadBalancedView
352 LoadBalancedView
@@ -370,10 +358,6 b' The :class:`.LoadBalancedView`'
370 Data Movement
358 Data Movement
371 =============
359 =============
372
360
373 push
374
375 pull
376
377 Reference
361 Reference
378
362
379 Results
363 Results
@@ -383,7 +367,7 b' AsyncResults are the primary class'
383
367
384 get_result
368 get_result
385
369
386 results,metadata
370 results, metadata
387
371
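A small sketch of working with the AsyncResult objects outlined above; the multiprocessing-style ready()/wait() helpers are assumptions about this development version, while get() and the .r attribute appear elsewhere in these docs:

.. sourcecode:: python

    # Sketch: polling and collecting an AsyncResult from a non-blocking apply.
    from IPython.parallel import Client

    rc = Client()
    dview = rc[:]
    dview.block = False

    ar = dview.apply(lambda: 42)
    if not ar.ready():      # assumed multiprocessing-style helper
        ar.wait()           # block until the results arrive
    print(ar.get())         # or ar.r, per the text above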
388 Querying the Hub
372 Querying the Hub
389 ================
373 ================
@@ -57,7 +57,7 b' The IPython architecture consists of four components:'
57 * The IPython schedulers.
57 * The IPython schedulers.
58 * The controller client.
58 * The controller client.
59
59
60 These components live in the :mod:`IPython.zmq.parallel` package and are
60 These components live in the :mod:`IPython.parallel` package and are
61 installed with IPython. They do, however, have additional dependencies
61 installed with IPython. They do, however, have additional dependencies
62 that must be installed. For more information, see our
62 that must be installed. For more information, see our
63 :ref:`installation documentation <install_index>`.
63 :ref:`installation documentation <install_index>`.
@@ -127,7 +127,7 b' a fully asynchronous interface to a set of engines.'
127 IPython client and views
127 IPython client and views
128 ------------------------
128 ------------------------
129
129
130 There is one primary object, the :class:`~.parallel.client.Client`, for connecting to a cluster.
130 There is one primary object, the :class:`~.parallel.Client`, for connecting to a cluster.
131 For each execution model, there is a corresponding :class:`~.parallel.view.View`. These views
131 For each execution model, there is a corresponding :class:`~.parallel.view.View`. These views
132 allow users to interact with a set of engines through the interface. Here are the two default
132 allow users to interact with a set of engines through the interface. Here are the two default
133 views:
133 views:
@@ -206,9 +206,9 b' everything is working correctly, try the following commands:'
206
206
207 .. sourcecode:: ipython
207 .. sourcecode:: ipython
208
208
209 In [1]: from IPython.zmq.parallel import client
209 In [1]: from IPython.parallel import Client
210
210
211 In [2]: c = client.Client()
211 In [2]: c = Client()
212
212
213 In [4]: c.ids
213 In [4]: c.ids
214 Out[4]: set([0, 1, 2, 3])
214 Out[4]: set([0, 1, 2, 3])
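Building on the session above, a client script can sanity-check the cluster before constructing any views; this is only a sketch:

.. sourcecode:: python

    # Sketch: verify that engines are registered before doing any work.
    from IPython.parallel import Client

    c = Client()
    if not c.ids:
        raise RuntimeError("no engines registered; start some with ipclusterz")
    dview = c[:]    # DirectView on every registered engine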
@@ -224,7 +224,7 b' name, create the client like this:'
224
224
225 .. sourcecode:: ipython
225 .. sourcecode:: ipython
226
226
227 In [2]: c = client.Client('/path/to/my/ipcontroller-client.json')
227 In [2]: c = Client('/path/to/my/ipcontroller-client.json')
228
228
229 Remember, a client needs to be able to see the Hub's ports to connect. So if they are on a
229 Remember, a client needs to be able to see the Hub's ports to connect. So if they are on a
230 different machine, you may need to use an ssh server to tunnel access to that machine,
230 different machine, you may need to use an ssh server to tunnel access to that machine,
@@ -232,7 +232,7 b' then you would connect to it with:'
232
232
233 .. sourcecode:: ipython
233 .. sourcecode:: ipython
234
234
235 In [2]: c = client.Client(sshserver='myhub.example.com')
235 In [2]: c = Client(sshserver='myhub.example.com')
236
236
237 Where 'myhub.example.com' is the url or IP address of the machine on
237 Where 'myhub.example.com' is the url or IP address of the machine on
238 which the Hub process is running (or another machine that has direct access to the Hub's ports).
238 which the Hub process is running (or another machine that has direct access to the Hub's ports).
@@ -123,11 +123,11 b' using our :func:`psum` function:'
123
123
124 .. sourcecode:: ipython
124 .. sourcecode:: ipython
125
125
126 In [1]: from IPython.zmq.parallel import client
126 In [1]: from IPython.parallel import Client
127
127
128 In [2]: %load_ext parallel_magic
128 In [2]: %load_ext parallel_magic
129
129
130 In [3]: c = client.Client(profile='mpi')
130 In [3]: c = Client(profile='mpi')
131
131
132 In [4]: view = c[:]
132 In [4]: view = c[:]
133
133
@@ -27,14 +27,14 b' our :ref:`introduction <ip1par>` to using IPython for parallel computing.'
27 Creating a ``Client`` instance
27 Creating a ``Client`` instance
28 ==============================
28 ==============================
29
29
30 The first step is to import the IPython :mod:`IPython.zmq.parallel.client`
30 The first step is to import the IPython :mod:`IPython.parallel`
31 module and then create a :class:`.Client` instance:
31 module and then create a :class:`.Client` instance:
32
32
33 .. sourcecode:: ipython
33 .. sourcecode:: ipython
34
34
35 In [1]: from IPython.zmq.parallel import client
35 In [1]: from IPython.parallel import Client
36
36
37 In [2]: rc = client.Client()
37 In [2]: rc = Client()
38
38
39 This form assumes that the default connection information (stored in
39 This form assumes that the default connection information (stored in
40 :file:`ipcontroller-client.json` found in :file:`IPYTHON_DIR/clusterz_default/security`) is
40 :file:`ipcontroller-client.json` found in :file:`IPYTHON_DIR/clusterz_default/security`) is
@@ -44,9 +44,9 b' file to the client machine, or enter its contents as arguments to the Client con'
44 .. sourcecode:: ipython
44 .. sourcecode:: ipython
45
45
46 # If you have copied the json connector file from the controller:
46 # If you have copied the json connector file from the controller:
47 In [2]: rc = client.Client('/path/to/ipcontroller-client.json')
47 In [2]: rc = Client('/path/to/ipcontroller-client.json')
48 # or to connect with a specific profile you have set up:
48 # or to connect with a specific profile you have set up:
49 In [3]: rc = client.Client(profile='mpi')
49 In [3]: rc = Client(profile='mpi')
50
50
51
51
52 To make sure there are engines connected to the controller, users can get a list
52 To make sure there are engines connected to the controller, users can get a list
@@ -286,7 +286,7 b' local Python/IPython session:'
286 /home/you/<ipython-input-45-7cd858bbb8e0> in <module>()
286 /home/you/<ipython-input-45-7cd858bbb8e0> in <module>()
287 ----> 1 ar.get(1)
287 ----> 1 ar.get(1)
288
288
289 /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout)
289 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
290 62 raise self._exception
290 62 raise self._exception
291 63 else:
291 63 else:
292 ---> 64 raise error.TimeoutError("Result not ready.")
292 ---> 64 raise error.TimeoutError("Result not ready.")
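The TimeoutError above can also be handled locally rather than letting it propagate; this sketch assumes the exception is importable from the error module referenced elsewhere in these docs (IPython.parallel.error):

.. sourcecode:: python

    # Sketch: bound the wait on a result and recover from the timeout.
    from IPython.parallel import Client, error

    rc = Client()
    dview = rc[:]
    dview.block = False

    ar = dview.apply(lambda: sum(range(10 ** 7)))
    try:
        print(ar.get(1))           # wait at most one second
    except error.TimeoutError:
        print("result not ready yet; try ar.get() again later")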
@@ -320,17 +320,17 b' and blocks until all of the associated results are ready:'
320 # Then, their results are ready using get() or the `.r` attribute
320 # Then, their results are ready using get() or the `.r` attribute
321 In [75]: pr_list[0].get()
321 In [75]: pr_list[0].get()
322 Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752]
322 Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752]
323
324
323
325
324
326 The ``block`` attribute
327 -----------------------
328
325
329 Many View methods(excluding :meth:`apply`) accept
326 The ``block`` and ``targets`` keyword arguments and attributes
330 ``block`` as a keyword argument. As we have seen above, these
327 --------------------------------------------------------------
331 keyword arguments control the blocking mode. The :class:`View` class also has
328
332 a :attr:`block` attribute that controls the default behavior when the keyword
329 Most DirectView methods (excluding :meth:`apply` and :meth:`map`) accept ``block`` and
333 argument is not provided. Thus the following logic is used for :attr:`block`:
330 ``targets`` as keyword arguments. As we have seen above, these keyword arguments control the
331 blocking mode and which engines the command is applied to. The :class:`View` class also has
332 :attr:`block` and :attr:`targets` attributes that control the default behavior when the keyword
333 arguments are not provided. Thus the following logic is used for :attr:`block` and :attr:`targets`:
334
334
335 * If no keyword argument is provided, the instance attributes are used.
335 * If no keyword argument is provided, the instance attributes are used.
336 * Keyword arguments, if provided, override the instance attributes for
336 * Keyword arguments, if provided, override the instance attributes for
@@ -340,16 +340,19 b' The following examples demonstrate how to use the instance attributes:'
340
340
341 .. sourcecode:: ipython
341 .. sourcecode:: ipython
342
342
343 In [16]: dview.targets = [0,2]
344
343 In [17]: dview.block = False
345 In [17]: dview.block = False
344
346
345 In [18]: ar = dview.apply(lambda : 10)
347 In [18]: ar = dview.apply(lambda : 10)
346
348
347 In [19]: ar.get()
349 In [19]: ar.get()
348 Out[19]: [10, 10, 10, 10]
350 Out[19]: [10, 10]
349
351
352 In [16]: dview.targets = dview.client.ids # all engines (4)
353
350 In [21]: dview.block = True
354 In [21]: dview.block = True
351
355
352 # Note targets='all' means all engines
353 In [22]: dview.apply(lambda : 42)
356 In [22]: dview.apply(lambda : 42)
354 Out[22]: [42, 42, 42, 42]
357 Out[22]: [42, 42, 42, 42]
355
358
@@ -428,7 +431,7 b' on the engines given by the :attr:`targets` attribute:'
428 Type %autopx to disable
431 Type %autopx to disable
429
432
430 In [32]: max_evals = []
433 In [32]: max_evals = []
431 <IPython.zmq.parallel.asyncresult.AsyncResult object at 0x17b8a70>
434 <IPython.parallel.asyncresult.AsyncResult object at 0x17b8a70>
432
435
433 In [33]: for i in range(100):
436 In [33]: for i in range(100):
434 ....: a = numpy.random.rand(10,10)
437 ....: a = numpy.random.rand(10,10)
@@ -437,7 +440,7 b' on the engines given by the :attr:`targets` attribute:'
437 ....: max_evals.append(evals[0].real)
440 ....: max_evals.append(evals[0].real)
438 ....:
441 ....:
439 ....:
442 ....:
440 <IPython.zmq.parallel.asyncresult.AsyncResult object at 0x17af8f0>
443 <IPython.parallel.asyncresult.AsyncResult object at 0x17af8f0>
441
444
442 In [34]: %autopx
445 In [34]: %autopx
443 Auto Parallel Disabled
446 Auto Parallel Disabled
@@ -576,7 +579,7 b' more other types of exceptions. Here is how it works:'
576 /home/you/<ipython-input-10-15c2c22dec39> in <module>()
579 /home/you/<ipython-input-10-15c2c22dec39> in <module>()
577 ----> 1 dview.execute('1/0', block=True)
580 ----> 1 dview.execute('1/0', block=True)
578
581
579 /path/to/site-packages/IPython/zmq/parallel/view.py in execute(self, code, block)
582 /path/to/site-packages/IPython/parallel/view.py in execute(self, code, block)
580 460 default: self.block
583 460 default: self.block
581 461 """
584 461 """
582 --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block)
585 --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block)
@@ -585,7 +588,7 b' more other types of exceptions. Here is how it works:'
585
588
586 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
589 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
587
590
588 /path/to/site-packages/IPython/zmq/parallel/view.py in sync_results(f, self, *args, **kwargs)
591 /path/to/site-packages/IPython/parallel/view.py in sync_results(f, self, *args, **kwargs)
589 46 def sync_results(f, self, *args, **kwargs):
592 46 def sync_results(f, self, *args, **kwargs):
590 47 """sync relevant results from self.client to our results attribute."""
593 47 """sync relevant results from self.client to our results attribute."""
591 ---> 48 ret = f(self, *args, **kwargs)
594 ---> 48 ret = f(self, *args, **kwargs)
@@ -594,21 +597,21 b' more other types of exceptions. Here is how it works:'
594
597
595 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
598 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
596
599
597 /path/to/site-packages/IPython/zmq/parallel/view.py in save_ids(f, self, *args, **kwargs)
600 /path/to/site-packages/IPython/parallel/view.py in save_ids(f, self, *args, **kwargs)
598 35 n_previous = len(self.client.history)
601 35 n_previous = len(self.client.history)
599 36 try:
602 36 try:
600 ---> 37 ret = f(self, *args, **kwargs)
603 ---> 37 ret = f(self, *args, **kwargs)
601 38 finally:
604 38 finally:
602 39 nmsgs = len(self.client.history) - n_previous
605 39 nmsgs = len(self.client.history) - n_previous
603
606
604 /path/to/site-packages/IPython/zmq/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track)
607 /path/to/site-packages/IPython/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track)
605 398 if block:
608 398 if block:
606 399 try:
609 399 try:
607 --> 400 return ar.get()
610 --> 400 return ar.get()
608 401 except KeyboardInterrupt:
611 401 except KeyboardInterrupt:
609 402 pass
612 402 pass
610
613
611 /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout)
614 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
612 87 return self._result
615 87 return self._result
613 88 else:
616 88 else:
614 ---> 89 raise self._exception
617 ---> 89 raise self._exception
@@ -660,7 +663,7 b' instance:'
660 /home/you/<ipython-input-10-15c2c22dec39> in <module>()
663 /home/you/<ipython-input-10-15c2c22dec39> in <module>()
661 ----> 1 dview.execute('1/0', block=True)
664 ----> 1 dview.execute('1/0', block=True)
662
665
663 /path/to/site-packages/IPython/zmq/parallel/view.py in execute(self, code, block)
666 /path/to/site-packages/IPython/parallel/view.py in execute(self, code, block)
664 460 default: self.block
667 460 default: self.block
665 461 """
668 461 """
666 --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block)
669 --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block)
@@ -669,7 +672,7 b' instance:'
669
672
670 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
673 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
671
674
672 /path/to/site-packages/IPython/zmq/parallel/view.py in sync_results(f, self, *args, **kwargs)
675 /path/to/site-packages/IPython/parallel/view.py in sync_results(f, self, *args, **kwargs)
673 46 def sync_results(f, self, *args, **kwargs):
676 46 def sync_results(f, self, *args, **kwargs):
674 47 """sync relevant results from self.client to our results attribute."""
677 47 """sync relevant results from self.client to our results attribute."""
675 ---> 48 ret = f(self, *args, **kwargs)
678 ---> 48 ret = f(self, *args, **kwargs)
@@ -678,21 +681,21 b' instance:'
678
681
679 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
682 /home/you/<string> in apply_with_flags(self, f, args, kwargs, block, track)
680
683
681 /path/to/site-packages/IPython/zmq/parallel/view.py in save_ids(f, self, *args, **kwargs)
684 /path/to/site-packages/IPython/parallel/view.py in save_ids(f, self, *args, **kwargs)
682 35 n_previous = len(self.client.history)
685 35 n_previous = len(self.client.history)
683 36 try:
686 36 try:
684 ---> 37 ret = f(self, *args, **kwargs)
687 ---> 37 ret = f(self, *args, **kwargs)
685 38 finally:
688 38 finally:
686 39 nmsgs = len(self.client.history) - n_previous
689 39 nmsgs = len(self.client.history) - n_previous
687
690
688 /path/to/site-packages/IPython/zmq/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track)
691 /path/to/site-packages/IPython/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track)
689 398 if block:
692 398 if block:
690 399 try:
693 399 try:
691 --> 400 return ar.get()
694 --> 400 return ar.get()
692 401 except KeyboardInterrupt:
695 401 except KeyboardInterrupt:
693 402 pass
696 402 pass
694
697
695 /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout)
698 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
696 87 return self._result
699 87 return self._result
697 88 else:
700 88 else:
698 ---> 89 raise self._exception
701 ---> 89 raise self._exception
@@ -706,7 +709,7 b' instance:'
706 [3:apply]: ZeroDivisionError: integer division or modulo by zero
709 [3:apply]: ZeroDivisionError: integer division or modulo by zero
707
710
708 In [82]: %debug
711 In [82]: %debug
709 > /Users/minrk/dev/ip/mine/IPython/zmq/parallel/asyncresult.py(80)get()
712 > /path/to/site-packages/IPython/parallel/asyncresult.py(80)get()
710 79 else:
713 79 else:
711 ---> 80 raise self._exception
714 ---> 80 raise self._exception
712 81 else:
715 81 else:
@@ -723,10 +726,10 b' instance:'
723 ipdb> e.print_tracebacks()
726 ipdb> e.print_tracebacks()
724 [0:apply]:
727 [0:apply]:
725 Traceback (most recent call last):
728 Traceback (most recent call last):
726 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request
729 File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request
727 exec code in working, working
730 exec code in working, working
728 File "<string>", line 1, in <module>
731 File "<string>", line 1, in <module>
729 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute
732 File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute
730 exec code in globals()
733 exec code in globals()
731 File "<string>", line 1, in <module>
734 File "<string>", line 1, in <module>
732 ZeroDivisionError: integer division or modulo by zero
735 ZeroDivisionError: integer division or modulo by zero
@@ -734,10 +737,10 b' instance:'
734
737
735 [1:apply]:
738 [1:apply]:
736 Traceback (most recent call last):
739 Traceback (most recent call last):
737 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request
740 File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request
738 exec code in working, working
741 exec code in working, working
739 File "<string>", line 1, in <module>
742 File "<string>", line 1, in <module>
740 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute
743 File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute
741 exec code in globals()
744 exec code in globals()
742 File "<string>", line 1, in <module>
745 File "<string>", line 1, in <module>
743 ZeroDivisionError: integer division or modulo by zero
746 ZeroDivisionError: integer division or modulo by zero
@@ -745,10 +748,10 b' instance:'
745
748
746 [2:apply]:
749 [2:apply]:
747 Traceback (most recent call last):
750 Traceback (most recent call last):
748 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request
751 File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request
749 exec code in working, working
752 exec code in working, working
750 File "<string>", line 1, in <module>
753 File "<string>", line 1, in <module>
751 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute
754 File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute
752 exec code in globals()
755 exec code in globals()
753 File "<string>", line 1, in <module>
756 File "<string>", line 1, in <module>
754 ZeroDivisionError: integer division or modulo by zero
757 ZeroDivisionError: integer division or modulo by zero
@@ -756,10 +759,10 b' instance:'
756
759
757 [3:apply]:
760 [3:apply]:
758 Traceback (most recent call last):
761 Traceback (most recent call last):
759 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request
762 File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request
760 exec code in working, working
763 exec code in working, working
761 File "<string>", line 1, in <module>
764 File "<string>", line 1, in <module>
762 File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute
765 File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute
763 exec code in globals()
766 exec code in globals()
764 File "<string>", line 1, in <module>
767 File "<string>", line 1, in <module>
765 ZeroDivisionError: integer division or modulo by zero
768 ZeroDivisionError: integer division or modulo by zero
@@ -784,7 +787,7 b' All of this same error handling magic even works in non-blocking mode:'
784 /Users/minrk/<ipython-input-3-8531eb3d26fb> in <module>()
787 /Users/minrk/<ipython-input-3-8531eb3d26fb> in <module>()
785 ----> 1 ar.get()
788 ----> 1 ar.get()
786
789
787 /Users/minrk/dev/ip/mine/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout)
790 /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout)
788 78 return self._result
791 78 return self._result
789 79 else:
792 79 else:
790 ---> 80 raise self._exception
793 ---> 80 raise self._exception
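The same non-blocking failure can be caught programmatically instead of dropping into %debug. In this sketch the aggregate exception name CompositeError is an assumption inferred from the per-engine tracebacks above, and print_tracebacks() is taken from the debugger session shown earlier:

.. sourcecode:: python

    # Sketch: catch the remote ZeroDivisionError raised by every engine.
    from IPython.parallel import Client, error

    rc = Client()
    dview = rc[:]

    ar = dview.execute('1/0', block=False)
    try:
        ar.get()
    except error.CompositeError as e:   # assumed aggregate exception class
        e.print_tracebacks()            # one remote traceback per engine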
@@ -140,7 +140,7 b' There, instruct ipclusterz to use the MPIExec launchers by adding the lines:'
140
140
141 .. sourcecode:: python
141 .. sourcecode:: python
142
142
143 c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.MPIExecEngineSetLauncher'
143 c.Global.engine_launcher = 'IPython.parallel.launcher.MPIExecEngineSetLauncher'
144
144
145 If the default MPI configuration is correct, then you can now start your cluster, with::
145 If the default MPI configuration is correct, then you can now start your cluster, with::
146
146
@@ -155,7 +155,7 b' If you have a reason to also start the Controller with mpi, you can specify:'
155
155
156 .. sourcecode:: python
156 .. sourcecode:: python
157
157
158 c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.MPIExecControllerLauncher'
158 c.Global.controller_launcher = 'IPython.parallel.launcher.MPIExecControllerLauncher'
159
159
160 .. note::
160 .. note::
161
161
@@ -196,8 +196,8 b' and engines:'
196
196
197 .. sourcecode:: python
197 .. sourcecode:: python
198
198
199 c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLauncher'
199 c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher'
200 c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.PBSEngineSetLauncher'
200 c.Global.engine_launcher = 'IPython.parallel.launcher.PBSEngineSetLauncher'
201
201
202 To use this mode, you first need to create a PBS script template that will be
202 To use this mode, you first need to create a PBS script template that will be
203 used to start the engines. Here is a sample PBS script template:
203 used to start the engines. Here is a sample PBS script template:
@@ -309,9 +309,9 b' To use this mode, select the SSH launchers in :file:`ipclusterz_config.py`:'
309
309
310 .. sourcecode:: python
310 .. sourcecode:: python
311
311
312 c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.SSHEngineSetLauncher'
312 c.Global.engine_launcher = 'IPython.parallel.launcher.SSHEngineSetLauncher'
313 # and if the Controller is also to be remote:
313 # and if the Controller is also to be remote:
314 c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.SSHControllerLauncher'
314 c.Global.controller_launcher = 'IPython.parallel.launcher.SSHControllerLauncher'
315
315
316
316
317 The controller's remote location and configuration can be specified:
317 The controller's remote location and configuration can be specified:
@@ -32,15 +32,15 b' our :ref:`introduction <ip1par>` to using IPython for parallel computing.'
32 Creating a ``Client`` instance
32 Creating a ``Client`` instance
33 ==============================
33 ==============================
34
34
35 The first step is to import the IPython :mod:`IPython.zmq.parallel.client`
35 The first step is to import the IPython :mod:`IPython.parallel.client`
36 module and then create a :class:`.Client` instance, and we will also be using
36 module and then create a :class:`.Client` instance, and we will also be using
37 a :class:`LoadBalancedView`, here called `lview`:
37 a :class:`LoadBalancedView`, here called `lview`:
38
38
39 .. sourcecode:: ipython
39 .. sourcecode:: ipython
40
40
41 In [1]: from IPython.zmq.parallel import client
41 In [1]: from IPython.parallel import Client
42
42
43 In [2]: rc = client.Client()
43 In [2]: rc = Client()
44
44
45
45
46 This form assumes that the controller was started on localhost with default
46 This form assumes that the controller was started on localhost with default
@@ -50,9 +50,9 b' argument to the constructor:'
50 .. sourcecode:: ipython
50 .. sourcecode:: ipython
51
51
52 # for a visible LAN controller listening on an external port:
52 # for a visible LAN controller listening on an external port:
53 In [2]: rc = client.Client('tcp://192.168.1.16:10101')
53 In [2]: rc = Client('tcp://192.168.1.16:10101')
54 # or to connect with a specific profile you have set up:
54 # or to connect with a specific profile you have set up:
55 In [3]: rc = client.Client(profile='mpi')
55 In [3]: rc = Client(profile='mpi')
56
56
57 For load-balanced execution, we will make use of a :class:`LoadBalancedView` object, which can
57 For load-balanced execution, we will make use of a :class:`LoadBalancedView` object, which can
58 be constructed via the client's :meth:`load_balanced_view` method:
58 be constructed via the client's :meth:`load_balanced_view` method:
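For reference, constructing the `lview` object mentioned above might look like the following sketch; the optional targets argument restricting destinations is taken from the cached-view example removed earlier in this changeset:

.. sourcecode:: python

    # Sketch: build a LoadBalancedView from an existing Client.
    from IPython.parallel import Client

    rc = Client()
    lview = rc.load_balanced_view()                # scheduler picks any engine
    restricted = rc.load_balanced_view([1, 2, 3])  # limit the destinations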
@@ -132,7 +132,7 b' Functional Dependencies'
132
132
133 Functional dependencies are used to determine whether a given engine is capable of running
133 Functional dependencies are used to determine whether a given engine is capable of running
134 a particular task. This is implemented via a special :class:`Exception` class,
134 a particular task. This is implemented via a special :class:`Exception` class,
135 :class:`UnmetDependency`, found in `IPython.zmq.parallel.error`. Its use is very simple:
135 :class:`UnmetDependency`, found in `IPython.parallel.error`. Its use is very simple:
136 if a task fails with an UnmetDependency exception, then the scheduler, instead of relaying
136 if a task fails with an UnmetDependency exception, then the scheduler, instead of relaying
137 the error up to the client like any other error, catches the error, and submits the task
137 the error up to the client like any other error, catches the error, and submits the task
138 to a different engine. This will repeat indefinitely, and a task will never be submitted
138 to a different engine. This will repeat indefinitely, and a task will never be submitted
@@ -145,7 +145,7 b' There are two decorators and a class used for functional dependencies:'
145
145
146 .. sourcecode:: ipython
146 .. sourcecode:: ipython
147
147
148 In [9]: from IPython.zmq.parallel.dependency import depend, require, dependent
148 In [9]: from IPython.parallel.dependency import depend, require, dependent
149
149
150 @require
150 @require
151 ********
151 ********
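A sketch of how the @require decorator named above might be used; treating module names as arguments is an assumption about this in-development API, so check the dependency module itself before relying on it:

.. sourcecode:: python

    # Sketch: a task that should only run on engines where numpy imports.
    # If the requirement is unmet, the scheduler resubmits the task elsewhere
    # via the UnmetDependency mechanism described above.
    from IPython.parallel.dependency import require

    @require('numpy')
    def spectral_radius(n=10):
        import numpy
        a = numpy.random.rand(n, n)
        return abs(numpy.linalg.eigvals(a)).max()

    # lview.apply(spectral_radius) would then be routed only to capable engines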
@@ -399,10 +399,10 b' The :class:`LoadBalancedView` has many more powerful features that allow quite a'
399 of flexibility in how tasks are defined and run. The next places to look are
399 of flexibility in how tasks are defined and run. The next places to look are
400 in the following classes:
400 in the following classes:
401
401
402 * :class:`IPython.zmq.parallel.view.LoadBalancedView`
402 * :class:`IPython.parallel.view.LoadBalancedView`
403 * :class:`IPython.zmq.parallel.client.AsyncResult`
403 * :class:`IPython.parallel.asyncresult.AsyncResult`
404 * :meth:`IPython.zmq.parallel.view.LoadBalancedView.apply`
404 * :meth:`IPython.parallel.view.LoadBalancedView.apply`
405 * :mod:`IPython.zmq.parallel.dependency`
405 * :mod:`IPython.parallel.dependency`
406
406
407 The following is an overview of how to use these classes together:
407 The following is an overview of how to use these classes together:
408
408
@@ -236,9 +236,9 b' will need to edit the following attributes in the file'
236 # Set these at the top of the file to tell ipclusterz to use the
236 # Set these at the top of the file to tell ipclusterz to use the
237 # Windows HPC job scheduler.
237 # Windows HPC job scheduler.
238 c.Global.controller_launcher = \
238 c.Global.controller_launcher = \
239 'IPython.zmq.parallel.launcher.WindowsHPCControllerLauncher'
239 'IPython.parallel.launcher.WindowsHPCControllerLauncher'
240 c.Global.engine_launcher = \
240 c.Global.engine_launcher = \
241 'IPython.zmq.parallel.launcher.WindowsHPCEngineSetLauncher'
241 'IPython.parallel.launcher.WindowsHPCEngineSetLauncher'
242
242
243 # Set these to the host name of the scheduler (head node) of your cluster.
243 # Set these to the host name of the scheduler (head node) of your cluster.
244 c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
244 c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
@@ -301,7 +301,7 b' apply it to each element of an array of integers in parallel using the'
301
301
302 .. sourcecode:: ipython
302 .. sourcecode:: ipython
303
303
304 In [1]: from IPython.zmq.parallel.client import *
304 In [1]: from IPython.parallel import *
305
305
306 In [2]: c = MultiEngineClient(profile='mycluster')
306 In [2]: c = MultiEngineClient(profile='mycluster')
307
307
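Expressed against the new package layout, the element-wise computation above can also be written with a view's map() method; the tenth-power function is only a placeholder, and Client stands in for the older MultiEngineClient shown in the diff:

.. sourcecode:: python

    # Sketch: apply a function to each element of a sequence in parallel.
    from IPython.parallel import Client

    rc = Client(profile='mycluster')
    view = rc[:]
    results = view.map(lambda x: x ** 10, range(32))
    print(list(results))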
@@ -215,10 +215,10 b" if 'setuptools' in sys.modules:"
215 'ipython = IPython.frontend.terminal.ipapp:launch_new_instance',
215 'ipython = IPython.frontend.terminal.ipapp:launch_new_instance',
216 'ipython-qtconsole = IPython.frontend.qt.console.ipythonqt:main',
216 'ipython-qtconsole = IPython.frontend.qt.console.ipythonqt:main',
217 'pycolor = IPython.utils.PyColorize:main',
217 'pycolor = IPython.utils.PyColorize:main',
218 'ipcontrollerz = IPython.zmq.parallel.ipcontrollerapp:launch_new_instance',
218 'ipcontrollerz = IPython.parallel.ipcontrollerapp:launch_new_instance',
219 'ipenginez = IPython.zmq.parallel.ipengineapp:launch_new_instance',
219 'ipenginez = IPython.parallel.ipengineapp:launch_new_instance',
220 'iploggerz = IPython.zmq.parallel.iploggerapp:launch_new_instance',
220 'iploggerz = IPython.parallel.iploggerapp:launch_new_instance',
221 'ipclusterz = IPython.zmq.parallel.ipclusterapp:launch_new_instance',
221 'ipclusterz = IPython.parallel.ipclusterapp:launch_new_instance',
222 'iptest = IPython.testing.iptest:main',
222 'iptest = IPython.testing.iptest:main',
223 'irunner = IPython.lib.irunner:main'
223 'irunner = IPython.lib.irunner:main'
224 ]
224 ]
@@ -132,8 +132,8 b' def find_packages():'
132 add_package(packages, 'testing.plugin', tests=False)
132 add_package(packages, 'testing.plugin', tests=False)
133 add_package(packages, 'utils', tests=True)
133 add_package(packages, 'utils', tests=True)
134 add_package(packages, 'zmq')
134 add_package(packages, 'zmq')
135 add_package(packages, 'zmq.parallel')
136 add_package(packages, 'zmq.pylab')
135 add_package(packages, 'zmq.pylab')
136 add_package(packages, 'parallel')
137 return packages
137 return packages
138
138
139 #---------------------------------------------------------------------------
139 #---------------------------------------------------------------------------
@@ -261,12 +261,13 b' def find_scripts():'
261 """
261 """
262 Find IPython's scripts.
262 Find IPython's scripts.
263 """
263 """
264 zmq_scripts = pjoin('IPython','zmq','parallel','scripts')
264 parallel_scripts = pjoin('IPython','parallel','scripts')
265 main_scripts = pjoin('IPython','scripts')
265 main_scripts = pjoin('IPython','scripts')
266 scripts = [pjoin(zmq_scripts, 'ipenginez'),
266 scripts = [
267 pjoin(zmq_scripts, 'ipcontrollerz'),
267 pjoin(parallel_scripts, 'ipenginez'),
268 pjoin(zmq_scripts, 'ipclusterz'),
268 pjoin(parallel_scripts, 'ipcontrollerz'),
269 pjoin(zmq_scripts, 'iploggerz'),
269 pjoin(parallel_scripts, 'ipclusterz'),
270 pjoin(parallel_scripts, 'iploggerz'),
270 pjoin(main_scripts, 'ipython'),
271 pjoin(main_scripts, 'ipython'),
271 pjoin(main_scripts, 'ipython-qtconsole'),
272 pjoin(main_scripts, 'ipython-qtconsole'),
272 pjoin(main_scripts, 'pycolor'),
273 pjoin(main_scripts, 'pycolor'),