organize IPython.parallel into subpackages
MinRK
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -23,8 +23,8 b' c = get_config()'
23 # - PBSControllerLauncher
23 # - PBSControllerLauncher
24 # - SGEControllerLauncher
24 # - SGEControllerLauncher
25 # - WindowsHPCControllerLauncher
25 # - WindowsHPCControllerLauncher
26 # c.Global.controller_launcher = 'IPython.parallel.launcher.LocalControllerLauncher'
26 # c.Global.controller_launcher = 'IPython.parallel.apps.launcher.LocalControllerLauncher'
27 # c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher'
27 # c.Global.controller_launcher = 'IPython.parallel.apps.launcher.PBSControllerLauncher'
28
28
29 # Options are:
29 # Options are:
30 # - LocalEngineSetLauncher
30 # - LocalEngineSetLauncher
@@ -32,7 +32,7 b' c = get_config()'
32 # - PBSEngineSetLauncher
32 # - PBSEngineSetLauncher
33 # - SGEEngineSetLauncher
33 # - SGEEngineSetLauncher
34 # - WindowsHPCEngineSetLauncher
34 # - WindowsHPCEngineSetLauncher
35 # c.Global.engine_launcher = 'IPython.parallel.launcher.LocalEngineSetLauncher'
35 # c.Global.engine_launcher = 'IPython.parallel.apps.launcher.LocalEngineSetLauncher'
36
36
37 #-----------------------------------------------------------------------------
37 #-----------------------------------------------------------------------------
38 # Global configuration
38 # Global configuration
@@ -89,7 +89,7 b' c = get_config()'
89 # Which class to use for the db backend. Currently supported are DictDB (the
89 # Which class to use for the db backend. Currently supported are DictDB (the
90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
91 # slow down the Hub's responsiveness, but also reduce its memory footprint.
91 # slow down the Hub's responsiveness, but also reduce its memory footprint.
92 # c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
92 # c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
93
93
94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
95 # Hub pings engines for heartbeats. This determines how quickly the Hub
95 # Hub pings engines for heartbeats. This determines how quickly the Hub
@@ -144,11 +144,11 b' c = get_config()'
144
144
145 # ----- in-memory configuration --------
145 # ----- in-memory configuration --------
146 # this line restores the default behavior: in-memory storage of all results.
146 # this line restores the default behavior: in-memory storage of all results.
147 # c.HubFactory.db_class = 'IPython.parallel.dictdb.DictDB'
147 # c.HubFactory.db_class = 'IPython.parallel.controller.dictdb.DictDB'
148
148
149 # ----- sqlite configuration --------
149 # ----- sqlite configuration --------
150 # use this line to activate sqlite:
150 # use this line to activate sqlite:
151 # c.HubFactory.db_class = 'IPython.parallel.sqlitedb.SQLiteDB'
151 # c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'
152
152
153 # You can specify the name of the db-file. By default, this will be located
153 # You can specify the name of the db-file. By default, this will be located
154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
@@ -165,7 +165,7 b' c = get_config()'
165
165
166 # ----- mongodb configuration --------
166 # ----- mongodb configuration --------
167 # use this line to activate mongodb:
167 # use this line to activate mongodb:
168 # c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
168 # c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
169
169
170 # You can specify the args and kwargs pymongo will use when creating the Connection.
170 # You can specify the args and kwargs pymongo will use when creating the Connection.
171 # For more information on what these options might be, see pymongo documentation.
171 # For more information on what these options might be, see pymongo documentation.
@@ -34,7 +34,7 b' try:'
34 except ImportError:
34 except ImportError:
35 pexpect = None
35 pexpect = None
36
36
37 from IPython.parallel.entry_point import select_random_ports
37 from IPython.parallel.util import select_random_ports
38
38
39 #-----------------------------------------------------------------------------
39 #-----------------------------------------------------------------------------
40 # Code
40 # Code
@@ -12,14 +12,15 b''
12
12
13 import zmq
13 import zmq
14
14
15 if zmq.__version__ < '2.1.3':
15 if zmq.__version__ < '2.1.4':
16 raise ImportError("IPython.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__)
16 raise ImportError("IPython.parallel requires pyzmq/0MQ >= 2.1.4, you appear to have %s"%zmq.__version__)
17
17
18 from .asyncresult import *
19 from .client import Client
20 from .dependency import *
21 from .remotefunction import *
22 from .view import *
23 from IPython.utils.pickleutil import Reference
18 from IPython.utils.pickleutil import Reference
24
19
20 from .client.asyncresult import *
21 from .client.client import Client
22 from .client.remotefunction import *
23 from .client.view import *
24 from .controller.dependency import *
25
25
26
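With this reorganization, the user-facing names stay importable from the package top level while the implementation moves into the new client/controller/engine/apps subpackages. A brief sketch of the resulting import paths, grounded in the hunks above:

    # public API, re-exported by IPython/parallel/__init__.py
    from IPython.parallel import Client, AsyncResult, Reference

    # internals now live under the new subpackages, e.g.
    from IPython.parallel.client.view import DirectView, LoadBalancedView
    from IPython.parallel.controller.dependency import depend, require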
NO CONTENT: file renamed from IPython/parallel/clusterdir.py to IPython/parallel/apps/clusterdir.py
@@ -26,7 +26,7 b' from zmq.eventloop import ioloop'
26
26
27 from IPython.external.argparse import ArgumentParser, SUPPRESS
27 from IPython.external.argparse import ArgumentParser, SUPPRESS
28 from IPython.utils.importstring import import_item
28 from IPython.utils.importstring import import_item
29 from IPython.parallel.clusterdir import (
29 from .clusterdir import (
30 ApplicationWithClusterDir, ClusterDirConfigLoader,
30 ApplicationWithClusterDir, ClusterDirConfigLoader,
31 ClusterDirError, PIDFileError
31 ClusterDirError, PIDFileError
32 )
32 )
@@ -30,9 +30,9 b' from zmq.log.handlers import PUBHandler'
30 from zmq.utils import jsonapi as json
30 from zmq.utils import jsonapi as json
31
31
32 from IPython.config.loader import Config
32 from IPython.config.loader import Config
33
33 from IPython.parallel import factory
34 from IPython.parallel import factory
34 from IPython.parallel.controller import ControllerFactory
35 from .clusterdir import (
35 from IPython.parallel.clusterdir import (
36 ApplicationWithClusterDir,
36 ApplicationWithClusterDir,
37 ClusterDirConfigLoader
37 ClusterDirConfigLoader
38 )
38 )
@@ -40,6 +40,7 b' from IPython.parallel.util import disambiguate_ip_address, split_url'
40 # from IPython.kernel.fcutil import FCServiceFactory, FURLError
40 # from IPython.kernel.fcutil import FCServiceFactory, FURLError
41 from IPython.utils.traitlets import Instance, Unicode
41 from IPython.utils.traitlets import Instance, Unicode
42
42
43 from IPython.parallel.controller.controller import ControllerFactory
43
44
44
45
45 #-----------------------------------------------------------------------------
46 #-----------------------------------------------------------------------------
@@ -117,11 +118,11 b' class IPControllerAppConfigLoader(ClusterDirConfigLoader):'
117 ## Hub Config:
118 ## Hub Config:
118 paa('--mongodb',
119 paa('--mongodb',
119 dest='HubFactory.db_class', action='store_const',
120 dest='HubFactory.db_class', action='store_const',
120 const='IPython.parallel.mongodb.MongoDB',
121 const='IPython.parallel.controller.mongodb.MongoDB',
121 help='Use MongoDB for task storage [default: in-memory]')
122 help='Use MongoDB for task storage [default: in-memory]')
122 paa('--sqlite',
123 paa('--sqlite',
123 dest='HubFactory.db_class', action='store_const',
124 dest='HubFactory.db_class', action='store_const',
124 const='IPython.parallel.sqlitedb.SQLiteDB',
125 const='IPython.parallel.controller.sqlitedb.SQLiteDB',
125 help='Use SQLite3 for DB task storage [default: in-memory]')
126 help='Use SQLite3 for DB task storage [default: in-memory]')
126 paa('--hb',
127 paa('--hb',
127 type=int, dest='HubFactory.hb', nargs=2,
128 type=int, dest='HubFactory.hb', nargs=2,
@@ -22,16 +22,17 b' import sys'
22 import zmq
22 import zmq
23 from zmq.eventloop import ioloop
23 from zmq.eventloop import ioloop
24
24
25 from IPython.parallel.clusterdir import (
25 from .clusterdir import (
26 ApplicationWithClusterDir,
26 ApplicationWithClusterDir,
27 ClusterDirConfigLoader
27 ClusterDirConfigLoader
28 )
28 )
29 from IPython.zmq.log import EnginePUBHandler
29 from IPython.zmq.log import EnginePUBHandler
30
30
31 from IPython.parallel import factory
31 from IPython.parallel import factory
32 from IPython.parallel.engine import EngineFactory
32 from IPython.parallel.engine.engine import EngineFactory
33 from IPython.parallel.streamkernel import Kernel
33 from IPython.parallel.engine.streamkernel import Kernel
34 from IPython.parallel.util import disambiguate_url
34 from IPython.parallel.util import disambiguate_url
35
35 from IPython.utils.importstring import import_item
36 from IPython.utils.importstring import import_item
36
37
37
38
@@ -20,7 +20,7 b' import sys'
20
20
21 import zmq
21 import zmq
22
22
23 from IPython.parallel.clusterdir import (
23 from .clusterdir import (
24 ApplicationWithClusterDir,
24 ApplicationWithClusterDir,
25 ClusterDirConfigLoader
25 ClusterDirConfigLoader
26 )
26 )
@@ -46,7 +46,7 b' from IPython.utils.traitlets import Any, Str, Int, List, Unicode, Dict, Instance'
46 from IPython.utils.path import get_ipython_module_path
46 from IPython.utils.path import get_ipython_module_path
47 from IPython.utils.process import find_cmd, pycmd2argv, FindCmdError
47 from IPython.utils.process import find_cmd, pycmd2argv, FindCmdError
48
48
49 from .factory import LoggingFactory
49 from IPython.parallel.factory import LoggingFactory
50
50
51 # load winhpcjob only on Windows
51 # load winhpcjob only on Windows
52 try:
52 try:
@@ -64,15 +64,15 b' except ImportError:'
64
64
65
65
66 ipcluster_cmd_argv = pycmd2argv(get_ipython_module_path(
66 ipcluster_cmd_argv = pycmd2argv(get_ipython_module_path(
67 'IPython.parallel.ipclusterapp'
67 'IPython.parallel.apps.ipclusterapp'
68 ))
68 ))
69
69
70 ipengine_cmd_argv = pycmd2argv(get_ipython_module_path(
70 ipengine_cmd_argv = pycmd2argv(get_ipython_module_path(
71 'IPython.parallel.ipengineapp'
71 'IPython.parallel.apps.ipengineapp'
72 ))
72 ))
73
73
74 ipcontroller_cmd_argv = pycmd2argv(get_ipython_module_path(
74 ipcontroller_cmd_argv = pycmd2argv(get_ipython_module_path(
75 'IPython.parallel.ipcontrollerapp'
75 'IPython.parallel.apps.ipcontrollerapp'
76 ))
76 ))
77
77
78 #-----------------------------------------------------------------------------
78 #-----------------------------------------------------------------------------
@@ -21,7 +21,7 b' from zmq.eventloop import ioloop, zmqstream'
21
21
22 from IPython.utils.traitlets import Int, Str, Instance, List
22 from IPython.utils.traitlets import Int, Str, Instance, List
23
23
24 from .factory import LoggingFactory
24 from IPython.parallel.factory import LoggingFactory
25
25
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27 # Classes
27 # Classes
NO CONTENT: file renamed from IPython/parallel/winhpcjob.py to IPython/parallel/apps/winhpcjob.py
@@ -15,7 +15,7 b' import time'
15 from zmq import MessageTracker
15 from zmq import MessageTracker
16
16
17 from IPython.external.decorator import decorator
17 from IPython.external.decorator import decorator
18 from . import error
18 from IPython.parallel import error
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Classes
21 # Classes
@@ -29,11 +29,12 b' from IPython.utils.traitlets import (HasTraits, Int, Instance, CUnicode,'
29 from IPython.external.decorator import decorator
29 from IPython.external.decorator import decorator
30 from IPython.external.ssh import tunnel
30 from IPython.external.ssh import tunnel
31
31
32 from . import error
32 from IPython.parallel import error
33 from . import util
33 from IPython.parallel import streamsession as ss
34 from . import streamsession as ss
34 from IPython.parallel import util
35
35 from .asyncresult import AsyncResult, AsyncHubResult
36 from .asyncresult import AsyncResult, AsyncHubResult
36 from .clusterdir import ClusterDir, ClusterDirError
37 from IPython.parallel.apps.clusterdir import ClusterDir, ClusterDirError
37 from .view import DirectView, LoadBalancedView
38 from .view import DirectView, LoadBalancedView
38
39
39 #--------------------------------------------------------------------------
40 #--------------------------------------------------------------------------
NO CONTENT: file renamed from IPython/parallel/map.py to IPython/parallel/client/map.py
NO CONTENT: file renamed from IPython/parallel/remotefunction.py to IPython/parallel/client/remotefunction.py
@@ -23,10 +23,11 b' from IPython.utils.traitlets import HasTraits, Any, Bool, List, Dict, Set, Int, '
23
23
24 from IPython.external.decorator import decorator
24 from IPython.external.decorator import decorator
25
25
26 from IPython.parallel import util
27 from IPython.parallel.controller.dependency import Dependency, dependent
28
26 from . import map as Map
29 from . import map as Map
27 from . import util
28 from .asyncresult import AsyncResult, AsyncMapResult
30 from .asyncresult import AsyncResult, AsyncMapResult
29 from .dependency import Dependency, dependent
30 from .remotefunction import ParallelFunction, parallel, remote
31 from .remotefunction import ParallelFunction, parallel, remote
31
32
32 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
@@ -68,6 +69,7 b' def spin_after(f, self, *args, **kwargs):'
68 # Classes
69 # Classes
69 #-----------------------------------------------------------------------------
70 #-----------------------------------------------------------------------------
70
71
72 @testdec.skip_doctest
71 class View(HasTraits):
73 class View(HasTraits):
72 """Base View class for more convenint apply(f,*args,**kwargs) syntax via attributes.
74 """Base View class for more convenint apply(f,*args,**kwargs) syntax via attributes.
73
75
@@ -105,7 +107,7 b' class View(HasTraits):'
105 history=List()
107 history=List()
106 outstanding = Set()
108 outstanding = Set()
107 results = Dict()
109 results = Dict()
108 client = Instance('IPython.parallel.client.Client')
110 client = Instance('IPython.parallel.Client')
109
111
110 _socket = Instance('zmq.Socket')
112 _socket = Instance('zmq.Socket')
111 _flag_names = List(['targets', 'block', 'track'])
113 _flag_names = List(['targets', 'block', 'track'])
@@ -386,11 +388,6 b' class DirectView(View):'
386 """sync_imports(local=True) as a property.
388 """sync_imports(local=True) as a property.
387
389
388 See sync_imports for details.
390 See sync_imports for details.
389
390 In [10]: with v.importer:
391 ....: import numpy
392 ....:
393 importing numpy on engine(s)
394
391
395 """
392 """
396 return self.sync_imports(True)
393 return self.sync_imports(True)
@@ -14,7 +14,6 b' This is a collection of one Hub and several Schedulers.'
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 from __future__ import print_function
15 from __future__ import print_function
16
16
17 import logging
18 from multiprocessing import Process
17 from multiprocessing import Process
19
18
20 import zmq
19 import zmq
@@ -23,7 +22,7 b' from zmq.devices import ProcessMonitoredQueue'
23 from IPython.utils.importstring import import_item
22 from IPython.utils.importstring import import_item
24 from IPython.utils.traitlets import Int, CStr, Instance, List, Bool
23 from IPython.utils.traitlets import Int, CStr, Instance, List, Bool
25
24
26 from .entry_point import signal_children
25 from IPython.parallel.util import signal_children
27 from .hub import Hub, HubFactory
26 from .hub import Hub, HubFactory
28 from .scheduler import launch_scheduler
27 from .scheduler import launch_scheduler
29
28
@@ -8,9 +8,9 b''
8
8
9 from types import ModuleType
9 from types import ModuleType
10
10
11 from .asyncresult import AsyncResult
11 from IPython.parallel.client.asyncresult import AsyncResult
12 from .error import UnmetDependency
12 from IPython.parallel.error import UnmetDependency
13 from .util import interactive
13 from IPython.parallel.util import interactive
14
14
15 class depend(object):
15 class depend(object):
16 """Dependency decorator, for use with tasks.
16 """Dependency decorator, for use with tasks.
NO CONTENT: file renamed from IPython/parallel/dictdb.py to IPython/parallel/controller/dictdb.py
@@ -12,15 +12,14 b' and hearts are tracked based on their XREQ identities.'
12
12
13 from __future__ import print_function
13 from __future__ import print_function
14 import time
14 import time
15 import logging
16 import uuid
15 import uuid
17
16
18 import zmq
17 import zmq
19 from zmq.devices import ProcessDevice,ThreadDevice
18 from zmq.devices import ProcessDevice, ThreadDevice
20 from zmq.eventloop import ioloop, zmqstream
19 from zmq.eventloop import ioloop, zmqstream
21
20
22 from IPython.utils.traitlets import Set, Instance, CFloat, Bool
21 from IPython.utils.traitlets import Set, Instance, CFloat, Bool
23 from .factory import LoggingFactory
22 from IPython.parallel.factory import LoggingFactory
24
23
25 class Heart(object):
24 class Heart(object):
26 """A basic heart object for responding to a HeartMonitor.
25 """A basic heart object for responding to a HeartMonitor.
@@ -27,12 +27,11 b' from zmq.eventloop.zmqstream import ZMQStream'
27 from IPython.utils.importstring import import_item
27 from IPython.utils.importstring import import_item
28 from IPython.utils.traitlets import HasTraits, Instance, Int, CStr, Str, Dict, Set, List, Bool
28 from IPython.utils.traitlets import HasTraits, Instance, Int, CStr, Str, Dict, Set, List, Bool
29
29
30 from .entry_point import select_random_ports
30 from IPython.parallel import error
31 from .factory import RegistrationFactory, LoggingFactory
31 from IPython.parallel.factory import RegistrationFactory, LoggingFactory
32 from IPython.parallel.util import select_random_ports, validate_url_container, ISO8601
32
33
33 from . import error
34 from .heartmonitor import HeartMonitor
34 from .heartmonitor import HeartMonitor
35 from .util import validate_url_container, ISO8601
36
35
37 #-----------------------------------------------------------------------------
36 #-----------------------------------------------------------------------------
38 # Code
37 # Code
@@ -160,11 +159,11 b' class HubFactory(RegistrationFactory):'
160
159
161 monitor_url = CStr('')
160 monitor_url = CStr('')
162
161
163 db_class = CStr('IPython.parallel.dictdb.DictDB', config=True)
162 db_class = CStr('IPython.parallel.controller.dictdb.DictDB', config=True)
164
163
165 # not configurable
164 # not configurable
166 db = Instance('IPython.parallel.dictdb.BaseDB')
165 db = Instance('IPython.parallel.controller.dictdb.BaseDB')
167 heartmonitor = Instance('IPython.parallel.heartmonitor.HeartMonitor')
166 heartmonitor = Instance('IPython.parallel.controller.heartmonitor.HeartMonitor')
168 subconstructors = List()
167 subconstructors = List()
169 _constructed = Bool(False)
168 _constructed = Bool(False)
170
169
NO CONTENT: file renamed from IPython/parallel/mongodb.py to IPython/parallel/controller/mongodb.py
@@ -36,11 +36,11 b' from zmq.eventloop import ioloop, zmqstream'
36 from IPython.external.decorator import decorator
36 from IPython.external.decorator import decorator
37 from IPython.utils.traitlets import Instance, Dict, List, Set
37 from IPython.utils.traitlets import Instance, Dict, List, Set
38
38
39 from . import error
39 from IPython.parallel import error
40 from .dependency import Dependency
40 from IPython.parallel.factory import SessionFactory
41 from .entry_point import connect_logger, local_logger
41 from IPython.parallel.util import connect_logger, local_logger
42 from .factory import SessionFactory
43
42
43 from .dependency import Dependency
44
44
45 @decorator
45 @decorator
46 def logged(f,self,*args,**kwargs):
46 def logged(f,self,*args,**kwargs):
@@ -17,7 +17,7 b' from zmq.eventloop import ioloop'
17
17
18 from IPython.utils.traitlets import CUnicode, CStr, Instance, List
18 from IPython.utils.traitlets import CUnicode, CStr, Instance, List
19 from .dictdb import BaseDB
19 from .dictdb import BaseDB
20 from .util import ISO8601
20 from IPython.parallel.util import ISO8601
21
21
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23 # SQLite operators, adapters, and converters
23 # SQLite operators, adapters, and converters
@@ -22,11 +22,12 b' from zmq.eventloop import ioloop, zmqstream'
22 from IPython.utils.traitlets import Instance, Str, Dict, Int, Type, CFloat
22 from IPython.utils.traitlets import Instance, Str, Dict, Int, Type, CFloat
23 # from IPython.utils.localinterfaces import LOCALHOST
23 # from IPython.utils.localinterfaces import LOCALHOST
24
24
25 from . import heartmonitor
25 from IPython.parallel.controller.heartmonitor import Heart
26 from .factory import RegistrationFactory
26 from IPython.parallel.factory import RegistrationFactory
27 from IPython.parallel.streamsession import Message
28 from IPython.parallel.util import disambiguate_url
29
27 from .streamkernel import Kernel
30 from .streamkernel import Kernel
28 from .streamsession import Message
29 from .util import disambiguate_url
30
31
31 class EngineFactory(RegistrationFactory):
32 class EngineFactory(RegistrationFactory):
32 """IPython engine"""
33 """IPython engine"""
@@ -129,7 +130,7 b' class EngineFactory(RegistrationFactory):'
129 loop=loop, user_ns = self.user_ns, logname=self.log.name)
130 loop=loop, user_ns = self.user_ns, logname=self.log.name)
130 self.kernel.start()
131 self.kernel.start()
131 hb_addrs = [ disambiguate_url(addr, self.location) for addr in hb_addrs ]
132 hb_addrs = [ disambiguate_url(addr, self.location) for addr in hb_addrs ]
132 heart = heartmonitor.Heart(*map(str, hb_addrs), heart_id=identity)
133 heart = Heart(*map(str, hb_addrs), heart_id=identity)
133 # ioloop.DelayedCallback(heart.start, 1000, self.loop).start()
134 # ioloop.DelayedCallback(heart.start, 1000, self.loop).start()
134 heart.start()
135 heart.start()
135
136
@@ -8,7 +8,7 b''
8
8
9 from zmq.eventloop import ioloop
9 from zmq.eventloop import ioloop
10
10
11 from .streamsession import StreamSession
11 from IPython.parallel.streamsession import StreamSession
12
12
13 class KernelStarter(object):
13 class KernelStarter(object):
14 """Object for resetting/killing the Kernel."""
14 """Object for resetting/killing the Kernel."""
@@ -22,25 +22,18 b' import time'
22 from code import CommandCompiler
22 from code import CommandCompiler
23 from datetime import datetime
23 from datetime import datetime
24 from pprint import pprint
24 from pprint import pprint
25 from signal import SIGTERM, SIGKILL
26
25
27 # System library imports.
26 # System library imports.
28 import zmq
27 import zmq
29 from zmq.eventloop import ioloop, zmqstream
28 from zmq.eventloop import ioloop, zmqstream
30
29
31 # Local imports.
30 # Local imports.
32 from IPython.core import ultratb
33 from IPython.utils.traitlets import Instance, List, Int, Dict, Set, Str
31 from IPython.utils.traitlets import Instance, List, Int, Dict, Set, Str
34 from IPython.zmq.completer import KernelCompleter
32 from IPython.zmq.completer import KernelCompleter
35 from IPython.zmq.iostream import OutStream
36 from IPython.zmq.displayhook import DisplayHook
37
33
38 from . import heartmonitor
34 from IPython.parallel.error import wrap_exception
39 from .client import Client
35 from IPython.parallel.factory import SessionFactory
40 from .error import wrap_exception
36 from IPython.parallel.util import serialize_object, unpack_apply_message, ISO8601
41 from .factory import SessionFactory
42 from .streamsession import StreamSession
43 from .util import serialize_object, unpack_apply_message, ISO8601, Namespace
44
37
45 def printer(*args):
38 def printer(*args):
46 pprint(args, stream=sys.__stdout__)
39 pprint(args, stream=sys.__stdout__)
@@ -71,7 +64,7 b' class Kernel(SessionFactory):'
71 control_stream = Instance(zmqstream.ZMQStream)
64 control_stream = Instance(zmqstream.ZMQStream)
72 task_stream = Instance(zmqstream.ZMQStream)
65 task_stream = Instance(zmqstream.ZMQStream)
73 iopub_stream = Instance(zmqstream.ZMQStream)
66 iopub_stream = Instance(zmqstream.ZMQStream)
74 client = Instance('IPython.parallel.client.Client')
67 client = Instance('IPython.parallel.Client')
75
68
76 # internals
69 # internals
77 shell_streams = List()
70 shell_streams = List()
@@ -428,62 +421,3 b' class Kernel(SessionFactory):'
428 # # don't busywait
421 # # don't busywait
429 # time.sleep(1e-3)
422 # time.sleep(1e-3)
430
423
431 def make_kernel(int_id, identity, control_addr, shell_addrs, iopub_addr, hb_addrs,
432 client_addr=None, loop=None, context=None, key=None,
433 out_stream_factory=OutStream, display_hook_factory=DisplayHook):
434 """NO LONGER IN USE"""
435 # create loop, context, and session:
436 if loop is None:
437 loop = ioloop.IOLoop.instance()
438 if context is None:
439 context = zmq.Context()
440 c = context
441 session = StreamSession(key=key)
442 # print (session.key)
443 # print (control_addr, shell_addrs, iopub_addr, hb_addrs)
444
445 # create Control Stream
446 control_stream = zmqstream.ZMQStream(c.socket(zmq.PAIR), loop)
447 control_stream.setsockopt(zmq.IDENTITY, identity)
448 control_stream.connect(control_addr)
449
450 # create Shell Streams (MUX, Task, etc.):
451 shell_streams = []
452 for addr in shell_addrs:
453 stream = zmqstream.ZMQStream(c.socket(zmq.PAIR), loop)
454 stream.setsockopt(zmq.IDENTITY, identity)
455 stream.connect(addr)
456 shell_streams.append(stream)
457
458 # create iopub stream:
459 iopub_stream = zmqstream.ZMQStream(c.socket(zmq.PUB), loop)
460 iopub_stream.setsockopt(zmq.IDENTITY, identity)
461 iopub_stream.connect(iopub_addr)
462
463 # Redirect input streams and set a display hook.
464 if out_stream_factory:
465 sys.stdout = out_stream_factory(session, iopub_stream, u'stdout')
466 sys.stdout.topic = 'engine.%i.stdout'%int_id
467 sys.stderr = out_stream_factory(session, iopub_stream, u'stderr')
468 sys.stderr.topic = 'engine.%i.stderr'%int_id
469 if display_hook_factory:
470 sys.displayhook = display_hook_factory(session, iopub_stream)
471 sys.displayhook.topic = 'engine.%i.pyout'%int_id
472
473
474 # launch heartbeat
475 heart = heartmonitor.Heart(*map(str, hb_addrs), heart_id=identity)
476 heart.start()
477
478 # create (optional) Client
479 if client_addr:
480 client = Client(client_addr, username=identity)
481 else:
482 client = None
483
484 kernel = Kernel(id=int_id, session=session, control_stream=control_stream,
485 shell_streams=shell_streams, iopub_stream=iopub_stream,
486 client=client, loop=loop)
487 kernel.start()
488 return loop, c, kernel
489
@@ -23,7 +23,7 b' from IPython.utils.importstring import import_item'
23 from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr
23 from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr
24
24
25 import IPython.parallel.streamsession as ss
25 import IPython.parallel.streamsession as ss
26 from IPython.parallel.entry_point import select_random_ports
26 from IPython.parallel.util import select_random_ports
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Classes
29 # Classes
@@ -13,6 +13,6 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.parallel.ipclusterapp import launch_new_instance
16 from IPython.parallel.apps.ipclusterapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
@@ -13,6 +13,6 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.parallel.ipcontrollerapp import launch_new_instance
16 from IPython.parallel.apps.ipcontrollerapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
@@ -13,7 +13,7 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.parallel.ipengineapp import launch_new_instance
16 from IPython.parallel.apps.ipengineapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
19
19
@@ -13,7 +13,7 b''
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15
15
16 from IPython.parallel.iploggerapp import launch_new_instance
16 from IPython.parallel.apps.iploggerapp import launch_new_instance
17
17
18 launch_new_instance()
18 launch_new_instance()
19
19
@@ -15,7 +15,7 b' import tempfile'
15 import time
15 import time
16 from subprocess import Popen, PIPE, STDOUT
16 from subprocess import Popen, PIPE, STDOUT
17
17
18 from IPython.parallel import client
18 from IPython.parallel import Client
19
19
20 processes = []
20 processes = []
21 blackhole = tempfile.TemporaryFile()
21 blackhole = tempfile.TemporaryFile()
@@ -27,14 +27,14 b' def setup():'
27 processes.append(cp)
27 processes.append(cp)
28 time.sleep(.5)
28 time.sleep(.5)
29 add_engines(1)
29 add_engines(1)
30 c = client.Client(profile='iptest')
30 c = Client(profile='iptest')
31 while not c.ids:
31 while not c.ids:
32 time.sleep(.1)
32 time.sleep(.1)
33 c.spin()
33 c.spin()
34 c.close()
34 c.close()
35
35
36 def add_engines(n=1, profile='iptest'):
36 def add_engines(n=1, profile='iptest'):
37 rc = client.Client(profile=profile)
37 rc = Client(profile=profile)
38 base = len(rc)
38 base = len(rc)
39 eps = []
39 eps = []
40 for i in range(n):
40 for i in range(n):
@@ -10,8 +10,6 b''
10 import sys
10 import sys
11 import tempfile
11 import tempfile
12 import time
12 import time
13 from signal import SIGINT
14 from multiprocessing import Process
15
13
16 from nose import SkipTest
14 from nose import SkipTest
17
15
@@ -21,9 +19,7 b' from zmq.tests import BaseZMQTestCase'
21 from IPython.external.decorator import decorator
19 from IPython.external.decorator import decorator
22
20
23 from IPython.parallel import error
21 from IPython.parallel import error
24 from IPython.parallel.client import Client
22 from IPython.parallel import Client
25 from IPython.parallel.ipcluster import launch_process
26 from IPython.parallel.entry_point import select_random_ports
27 from IPython.parallel.tests import processes,add_engines
23 from IPython.parallel.tests import processes,add_engines
28
24
29 # simple tasks for use in apply tests
25 # simple tasks for use in apply tests
@@ -16,10 +16,10 b' from tempfile import mktemp'
16
16
17 import zmq
17 import zmq
18
18
19 from IPython.parallel import client as clientmod
19 from IPython.parallel.client import client as clientmod
20 from IPython.parallel import error
20 from IPython.parallel import error
21 from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult
21 from IPython.parallel import AsyncResult, AsyncHubResult
22 from IPython.parallel.view import LoadBalancedView, DirectView
22 from IPython.parallel import LoadBalancedView, DirectView
23
23
24 from clienttest import ClusterTestCase, segfault, wait, add_engines
24 from clienttest import ClusterTestCase, segfault, wait, add_engines
25
25
@@ -18,7 +18,7 b' import os'
18
18
19 from IPython.utils.pickleutil import can, uncan
19 from IPython.utils.pickleutil import can, uncan
20
20
21 from IPython.parallel import dependency as dmod
21 import IPython.parallel as pmod
22 from IPython.parallel.util import interactive
22 from IPython.parallel.util import interactive
23
23
24 from IPython.parallel.tests import add_engines
24 from IPython.parallel.tests import add_engines
@@ -27,7 +27,7 b' from .clienttest import ClusterTestCase'
27 def setup():
27 def setup():
28 add_engines(1)
28 add_engines(1)
29
29
30 @dmod.require('time')
30 @pmod.require('time')
31 def wait(n):
31 def wait(n):
32 time.sleep(n)
32 time.sleep(n)
33 return n
33 return n
@@ -65,7 +65,7 b' class DependencyTest(ClusterTestCase):'
65 def test_require_imports(self):
65 def test_require_imports(self):
66 """test that @require imports names"""
66 """test that @require imports names"""
67 @self.cancan
67 @self.cancan
68 @dmod.require('urllib')
68 @pmod.require('urllib')
69 @interactive
69 @interactive
70 def encode(dikt):
70 def encode(dikt):
71 return urllib.urlencode(dikt)
71 return urllib.urlencode(dikt)
@@ -73,13 +73,13 b' class DependencyTest(ClusterTestCase):'
73 self.assertEquals(encode(dict(a=5)), 'a=5')
73 self.assertEquals(encode(dict(a=5)), 'a=5')
74
74
75 def test_success_only(self):
75 def test_success_only(self):
76 dep = dmod.Dependency(mixed, success=True, failure=False)
76 dep = pmod.Dependency(mixed, success=True, failure=False)
77 self.assertUnmet(dep)
77 self.assertUnmet(dep)
78 self.assertUnreachable(dep)
78 self.assertUnreachable(dep)
79 dep.all=False
79 dep.all=False
80 self.assertMet(dep)
80 self.assertMet(dep)
81 self.assertReachable(dep)
81 self.assertReachable(dep)
82 dep = dmod.Dependency(completed, success=True, failure=False)
82 dep = pmod.Dependency(completed, success=True, failure=False)
83 self.assertMet(dep)
83 self.assertMet(dep)
84 self.assertReachable(dep)
84 self.assertReachable(dep)
85 dep.all=False
85 dep.all=False
@@ -87,13 +87,13 b' class DependencyTest(ClusterTestCase):'
87 self.assertReachable(dep)
87 self.assertReachable(dep)
88
88
89 def test_failure_only(self):
89 def test_failure_only(self):
90 dep = dmod.Dependency(mixed, success=False, failure=True)
90 dep = pmod.Dependency(mixed, success=False, failure=True)
91 self.assertUnmet(dep)
91 self.assertUnmet(dep)
92 self.assertUnreachable(dep)
92 self.assertUnreachable(dep)
93 dep.all=False
93 dep.all=False
94 self.assertMet(dep)
94 self.assertMet(dep)
95 self.assertReachable(dep)
95 self.assertReachable(dep)
96 dep = dmod.Dependency(completed, success=False, failure=True)
96 dep = pmod.Dependency(completed, success=False, failure=True)
97 self.assertUnmet(dep)
97 self.assertUnmet(dep)
98 self.assertUnreachable(dep)
98 self.assertUnreachable(dep)
99 dep.all=False
99 dep.all=False
@@ -17,8 +17,8 b' import zmq'
17
17
18 from IPython import parallel as pmod
18 from IPython import parallel as pmod
19 from IPython.parallel import error
19 from IPython.parallel import error
20 from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult
20 from IPython.parallel import AsyncResult, AsyncHubResult, AsyncMapResult
21 from IPython.parallel.view import LoadBalancedView, DirectView
21 from IPython.parallel import LoadBalancedView, DirectView
22 from IPython.parallel.util import interactive
22 from IPython.parallel.util import interactive
23
23
24 from IPython.parallel.tests import add_engines
24 from IPython.parallel.tests import add_engines
@@ -10,8 +10,18 b''
10 # Imports
10 # Imports
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 # Standard library imports.
14 import logging
15 import os
13 import re
16 import re
17 import stat
14 import socket
18 import socket
19 import sys
20 from signal import signal, SIGINT, SIGABRT, SIGTERM
21 try:
22 from signal import SIGKILL
23 except ImportError:
24 SIGKILL=None
15
25
16 try:
26 try:
17 import cPickle
27 import cPickle
@@ -20,10 +30,16 b' except:'
20 cPickle = None
30 cPickle = None
21 import pickle
31 import pickle
22
32
33 # System library imports
34 import zmq
35 from zmq.log import handlers
23
36
37 # IPython imports
24 from IPython.utils.pickleutil import can, uncan, canSequence, uncanSequence
38 from IPython.utils.pickleutil import can, uncan, canSequence, uncanSequence
25 from IPython.utils.newserialized import serialize, unserialize
39 from IPython.utils.newserialized import serialize, unserialize
40 from IPython.zmq.log import EnginePUBHandler
26
41
42 # globals
27 ISO8601="%Y-%m-%dT%H:%M:%S.%f"
43 ISO8601="%Y-%m-%dT%H:%M:%S.%f"
28
44
29 #-----------------------------------------------------------------------------
45 #-----------------------------------------------------------------------------
@@ -352,3 +368,95 b' def _execute(code):'
352 """helper method for implementing `client.execute` via `client.apply`"""
368 """helper method for implementing `client.execute` via `client.apply`"""
353 exec code in globals()
369 exec code in globals()
354
370
371 #--------------------------------------------------------------------------
372 # extra process management utilities
373 #--------------------------------------------------------------------------
374
375 _random_ports = set()
376
377 def select_random_ports(n):
378 """Selects and return n random ports that are available."""
379 ports = []
380 for i in xrange(n):
381 sock = socket.socket()
382 sock.bind(('', 0))
383 while sock.getsockname()[1] in _random_ports:
384 sock.close()
385 sock = socket.socket()
386 sock.bind(('', 0))
387 ports.append(sock)
388 for i, sock in enumerate(ports):
389 port = sock.getsockname()[1]
390 sock.close()
391 ports[i] = port
392 _random_ports.add(port)
393 return ports
394
395 def signal_children(children):
396 """Relay interupt/term signals to children, for more solid process cleanup."""
397 def terminate_children(sig, frame):
398 logging.critical("Got signal %i, terminating children..."%sig)
399 for child in children:
400 child.terminate()
401
402 sys.exit(sig != SIGINT)
403 # sys.exit(sig)
404 for sig in (SIGINT, SIGABRT, SIGTERM):
405 signal(sig, terminate_children)
406
407 def generate_exec_key(keyfile):
408 import uuid
409 newkey = str(uuid.uuid4())
410 with open(keyfile, 'w') as f:
411 # f.write('ipython-key ')
412 f.write(newkey+'\n')
413 # set user-only RW permissions (0600)
414 # this will have no effect on Windows
415 os.chmod(keyfile, stat.S_IRUSR|stat.S_IWUSR)
416
417
418 def integer_loglevel(loglevel):
419 try:
420 loglevel = int(loglevel)
421 except ValueError:
422 if isinstance(loglevel, str):
423 loglevel = getattr(logging, loglevel)
424 return loglevel
425
426 def connect_logger(logname, context, iface, root="ip", loglevel=logging.DEBUG):
427 logger = logging.getLogger(logname)
428 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
429 # don't add a second PUBHandler
430 return
431 loglevel = integer_loglevel(loglevel)
432 lsock = context.socket(zmq.PUB)
433 lsock.connect(iface)
434 handler = handlers.PUBHandler(lsock)
435 handler.setLevel(loglevel)
436 handler.root_topic = root
437 logger.addHandler(handler)
438 logger.setLevel(loglevel)
439
440 def connect_engine_logger(context, iface, engine, loglevel=logging.DEBUG):
441 logger = logging.getLogger()
442 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
443 # don't add a second PUBHandler
444 return
445 loglevel = integer_loglevel(loglevel)
446 lsock = context.socket(zmq.PUB)
447 lsock.connect(iface)
448 handler = EnginePUBHandler(engine, lsock)
449 handler.setLevel(loglevel)
450 logger.addHandler(handler)
451 logger.setLevel(loglevel)
452
453 def local_logger(logname, loglevel=logging.DEBUG):
454 loglevel = integer_loglevel(loglevel)
455 logger = logging.getLogger(logname)
456 if any([isinstance(h, logging.StreamHandler) for h in logger.handlers]):
457 # don't add a second StreamHandler
458 return
459 handler = logging.StreamHandler()
460 handler.setLevel(loglevel)
461 logger.addHandler(handler)
462 logger.setLevel(loglevel)
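The process-management and logging helpers shown above move from entry_point.py into IPython.parallel.util, which the other hunks in this changeset now import from. A minimal usage sketch (the logger name 'mycluster' is only illustrative):

    from IPython.parallel.util import select_random_ports, local_logger

    ports = select_random_ports(3)       # three currently unused TCP ports, e.g. [50211, 50212, 50213]
    local_logger('mycluster', 'DEBUG')   # attach a StreamHandler at DEBUG level to that logger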
@@ -282,7 +282,10 b' def make_runners():'
282 # Packages to be tested via nose, that only depend on the stdlib
282 # Packages to be tested via nose, that only depend on the stdlib
283 nose_pkg_names = ['config', 'core', 'extensions', 'frontend', 'lib',
283 nose_pkg_names = ['config', 'core', 'extensions', 'frontend', 'lib',
284 'scripts', 'testing', 'utils' ]
284 'scripts', 'testing', 'utils' ]
285
285
286 if have['zmq']:
287 nose_pkg_names.append('parallel')
288
286 # For debugging this code, only load quick stuff
289 # For debugging this code, only load quick stuff
287 #nose_pkg_names = ['core', 'extensions'] # dbg
290 #nose_pkg_names = ['core', 'extensions'] # dbg
288
291
@@ -93,7 +93,7 b' class CannedFunction(CannedObject):'
93
93
94 def can(obj):
94 def can(obj):
95 # import here to prevent module-level circular imports
95 # import here to prevent module-level circular imports
96 from IPython.parallel.dependency import dependent
96 from IPython.parallel import dependent
97 if isinstance(obj, dependent):
97 if isinstance(obj, dependent):
98 keys = ('f','df')
98 keys = ('f','df')
99 return CannedObject(obj, keys=keys)
99 return CannedObject(obj, keys=keys)
@@ -241,7 +241,7 b' Views'
241 =====
241 =====
242
242
243 The principal extension of the :class:`~parallel.Client` is the
243 The principal extension of the :class:`~parallel.Client` is the
244 :class:`~parallel.view.View` class. The client
244 :class:`~parallel.View` class. The client
245
245
246
246
247 DirectView
247 DirectView
@@ -128,7 +128,7 b' IPython client and views'
128 ------------------------
128 ------------------------
129
129
130 There is one primary object, the :class:`~.parallel.Client`, for connecting to a cluster.
130 There is one primary object, the :class:`~.parallel.Client`, for connecting to a cluster.
131 For each execution model, there is a corresponding :class:`~.parallel.view.View`. These views
131 For each execution model, there is a corresponding :class:`~.parallel.View`. These views
132 allow users to interact with a set of engines through the interface. Here are the two default
132 allow users to interact with a set of engines through the interface. Here are the two default
133 views:
133 views:
134
134
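For context, the Client/View split described in this documentation hunk corresponds to the following usage (an illustrative sketch; it assumes a cluster is already running under the default profile):

    from IPython.parallel import Client

    rc = Client()                     # the single low-level connection object
    dview = rc[:]                     # DirectView: explicit addressing of a set of engines
    lview = rc.load_balanced_view()   # LoadBalancedView: the scheduler picks an engine per task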
@@ -431,7 +431,7 b' on the engines given by the :attr:`targets` attribute:'
431 Type %autopx to disable
431 Type %autopx to disable
432
432
433 In [32]: max_evals = []
433 In [32]: max_evals = []
434 <IPython.parallel.asyncresult.AsyncResult object at 0x17b8a70>
434 <IPython.parallel.AsyncResult object at 0x17b8a70>
435
435
436 In [33]: for i in range(100):
436 In [33]: for i in range(100):
437 ....: a = numpy.random.rand(10,10)
437 ....: a = numpy.random.rand(10,10)
@@ -440,7 +440,7 b' on the engines given by the :attr:`targets` attribute:'
440 ....: max_evals.append(evals[0].real)
440 ....: max_evals.append(evals[0].real)
441 ....:
441 ....:
442 ....:
442 ....:
443 <IPython.parallel.asyncresult.AsyncResult object at 0x17af8f0>
443 <IPython.parallel.AsyncResult object at 0x17af8f0>
444
444
445 In [34]: %autopx
445 In [34]: %autopx
446 Auto Parallel Disabled
446 Auto Parallel Disabled
@@ -140,7 +140,7 b' There, instruct ipcluster to use the MPIExec launchers by adding the lines:'
140
140
141 .. sourcecode:: python
141 .. sourcecode:: python
142
142
143 c.Global.engine_launcher = 'IPython.parallel.launcher.MPIExecEngineSetLauncher'
143 c.Global.engine_launcher = 'IPython.parallel.apps.launcher.MPIExecEngineSetLauncher'
144
144
145 If the default MPI configuration is correct, then you can now start your cluster, with::
145 If the default MPI configuration is correct, then you can now start your cluster, with::
146
146
@@ -155,7 +155,7 b' If you have a reason to also start the Controller with mpi, you can specify:'
155
155
156 .. sourcecode:: python
156 .. sourcecode:: python
157
157
158 c.Global.controller_launcher = 'IPython.parallel.launcher.MPIExecControllerLauncher'
158 c.Global.controller_launcher = 'IPython.parallel.apps.launcher.MPIExecControllerLauncher'
159
159
160 .. note::
160 .. note::
161
161
@@ -196,8 +196,8 b' and engines:'
196
196
197 .. sourcecode:: python
197 .. sourcecode:: python
198
198
199 c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher'
199 c.Global.controller_launcher = 'IPython.parallel.apps.launcher.PBSControllerLauncher'
200 c.Global.engine_launcher = 'IPython.parallel.launcher.PBSEngineSetLauncher'
200 c.Global.engine_launcher = 'IPython.parallel.apps.launcher.PBSEngineSetLauncher'
201
201
202 IPython does provide simple default batch templates for PBS and SGE, but you may need
202 IPython does provide simple default batch templates for PBS and SGE, but you may need
203 to specify your own. Here is a sample PBS script template:
203 to specify your own. Here is a sample PBS script template:
@@ -318,9 +318,9 b' To use this mode, select the SSH launchers in :file:`ipcluster_config.py`:'
318
318
319 .. sourcecode:: python
319 .. sourcecode:: python
320
320
321 c.Global.engine_launcher = 'IPython.parallel.launcher.SSHEngineSetLauncher'
321 c.Global.engine_launcher = 'IPython.parallel.apps.launcher.SSHEngineSetLauncher'
322 # and if the Controller is also to be remote:
322 # and if the Controller is also to be remote:
323 c.Global.controller_launcher = 'IPython.parallel.launcher.SSHControllerLauncher'
323 c.Global.controller_launcher = 'IPython.parallel.apps.launcher.SSHControllerLauncher'
324
324
325
325
326 The controller's remote location and configuration can be specified:
326 The controller's remote location and configuration can be specified:
@@ -32,7 +32,7 b' our :ref:`introduction <ip1par>` to using IPython for parallel computing.'
32 Creating a ``Client`` instance
32 Creating a ``Client`` instance
33 ==============================
33 ==============================
34
34
35 The first step is to import the IPython :mod:`IPython.parallel.client`
35 The first step is to import the IPython :mod:`IPython.parallel`
36 module and then create a :class:`.Client` instance, and we will also be using
36 module and then create a :class:`.Client` instance, and we will also be using
37 a :class:`LoadBalancedView`, here called `lview`:
37 a :class:`LoadBalancedView`, here called `lview`:
38
38
@@ -145,7 +145,7 b' There are two decorators and a class used for functional dependencies:'
145
145
146 .. sourcecode:: ipython
146 .. sourcecode:: ipython
147
147
148 In [9]: from IPython.parallel.dependency import depend, require, dependent
148 In [9]: from IPython.parallel import depend, require, dependent
149
149
150 @require
150 @require
151 ********
151 ********
@@ -398,10 +398,10 b' The :class:`LoadBalancedView` has many more powerful features that allow quite a'
398 of flexibility in how tasks are defined and run. The next places to look are
398 of flexibility in how tasks are defined and run. The next places to look are
399 in the following classes:
399 in the following classes:
400
400
401 * :class:`IPython.parallel.view.LoadBalancedView`
401 * :class:`~IPython.parallel.client.view.LoadBalancedView`
402 * :class:`IPython.parallel.asyncresult.AsyncResult`
402 * :class:`~IPython.parallel.client.asyncresult.AsyncResult`
403 * :meth:`IPython.parallel.view.LoadBalancedView.apply`
403 * :meth:`~IPython.parallel.client.view.LoadBalancedView.apply`
404 * :mod:`IPython.parallel.dependency`
404 * :mod:`~IPython.parallel.controller.dependency`
405
405
406 The following is an overview of how to use these classes together:
406 The following is an overview of how to use these classes together:
407
407
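As a hedged illustration of how the classes listed in this hunk fit together (adapted from the package's own tests earlier in this changeset; assumes a running cluster, and wait_and_echo is just an example task):

    from IPython.parallel import Client, require

    rc = Client()
    lview = rc.load_balanced_view()

    @require('time')                          # only run on engines where `time` can be imported
    def wait_and_echo(n):
        time.sleep(n)
        return n

    ar = lview.apply_async(wait_and_echo, 1)  # asynchronous call returns an AsyncResult
    print(ar.get())                           # -> 1, once an eligible engine has run it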
@@ -1,8 +1,8 b''
1 .. _parallel_transition:
1 .. _parallel_transition:
2
2
3 ============================================================
3 =====================================================
4 Transitioning from IPython.kernel to IPython.zmq.newparallel
4 Transitioning from IPython.kernel to IPython.parallel
5 ============================================================
5 =====================================================
6
6
7
7
8 We have rewritten our parallel computing tools to use 0MQ_ and Tornado_. The redesign
8 We have rewritten our parallel computing tools to use 0MQ_ and Tornado_. The redesign
@@ -39,8 +39,8 b' Creating a Client'
39 Creating a client with default settings has not changed much, though the extended options have.
39 Creating a client with default settings has not changed much, though the extended options have.
40 One significant change is that there are no longer multiple Client classes to represent the
40 One significant change is that there are no longer multiple Client classes to represent the
41 various execution models. There is just one low-level Client object for connecting to the
41 various execution models. There is just one low-level Client object for connecting to the
42 cluster, and View objects are created from that Client that provide the different interfaces
42 cluster, and View objects are created from that Client that provide the different interfaces for
43 for execution.
43 execution.
44
44
45
45
46 To create a new client, and set up the default direct and load-balanced objects:
46 To create a new client, and set up the default direct and load-balanced objects:
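A sketch of that setup under the new API (assuming a cluster is already running):

    from IPython.parallel import Client

    rc = Client()
    dview = rc[:]                     # default direct interface to all engines
    lview = rc.load_balanced_view()   # default load-balanced interface

    # both views support apply(); a DirectView returns one result per engine,
    # a LoadBalancedView returns a single result from whichever engine ran the task
    dview.apply_sync(lambda: 'hello')
    lview.apply_sync(lambda: 'hello')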
@@ -124,8 +124,6 b' argument.'
124 DirectView.
124 DirectView.
125
125
126
126
127
128
129 The other major difference is the use of :meth:`apply`. When remote work is simply functions,
127 The other major difference is the use of :meth:`apply`. When remote work is simply functions,
130 the natural return value is the actual Python objects. It is no longer the recommended pattern
128 the natural return value is the actual Python objects. It is no longer the recommended pattern
131 to use stdout as your results, due to stream decoupling and the asynchronous nature of how the
129 to use stdout as your results, due to stream decoupling and the asynchronous nature of how the
@@ -203,6 +201,25 b' the engine beyond the duration of the task.'
203 LoadBalancedView.
201 LoadBalancedView.
204
202
205
203
204 PendingResults to AsyncResults
205 ------------------------------
206
207 With the departure from Twisted, we no longer have the :class:`Deferred` class for representing
208 unfinished results. For this, we have an AsyncResult object, based on the object of the same
209 name in the built-in :mod:`multiprocessing.pool` module. Our version provides a superset of that
210 interface.
211
212 However, unlike in IPython.kernel, we do not have PendingDeferred, PendingResult, or TaskResult
213 objects. Simply this one object, the AsyncResult. Every asynchronous (`block=False`) call
214 returns one.
215
216 The basic methods of an AsyncResult are:
217
218 .. sourcecode:: python
219
220 AsyncResult.wait([timeout]): # wait for the result to arrive
221 AsyncResult.get([timeout]): # wait for the result to arrive, and then return it
222 AsyncResult.metadata: # dict of extra information about execution.
206
223
207 There are still some things that behave the same as IPython.kernel:
224 There are still some things that behave the same as IPython.kernel:
208
225
@@ -218,4 +235,11 b' There are still some things that behave the same as IPython.kernel:'
218 In [6]: ar.r
235 In [6]: ar.r
219 Out[6]: [5, 5]
236 Out[6]: [5, 5]
220
237
238 The ``.r`` or ``.result`` property simply calls :meth:`get`, waiting for and returning the
239 result.
240
241 .. seealso::
242
243 :ref:`AsyncResult details <AsyncResult>`
244
221
245
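A short sketch of the AsyncResult workflow this section summarizes (lview is a LoadBalancedView and slow_func a placeholder for any picklable function; both names are assumptions of the example):

    ar = lview.apply_async(slow_func, 10)   # any block=False call returns an AsyncResult
    ar.wait(5)                              # block for up to five seconds
    result = ar.get()                       # wait for, then return, the result
    meta = ar.metadata                      # dict of extra information about the execution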
@@ -233,9 +233,9 b' will need to edit the following attributes in the file'
233 # Set these at the top of the file to tell ipcluster to use the
233 # Set these at the top of the file to tell ipcluster to use the
234 # Windows HPC job scheduler.
234 # Windows HPC job scheduler.
235 c.Global.controller_launcher = \
235 c.Global.controller_launcher = \
236 'IPython.parallel.launcher.WindowsHPCControllerLauncher'
236 'IPython.parallel.apps.launcher.WindowsHPCControllerLauncher'
237 c.Global.engine_launcher = \
237 c.Global.engine_launcher = \
238 'IPython.parallel.launcher.WindowsHPCEngineSetLauncher'
238 'IPython.parallel.apps.launcher.WindowsHPCEngineSetLauncher'
239
239
240 # Set these to the host name of the scheduler (head node) of your cluster.
240 # Set these to the host name of the scheduler (head node) of your cluster.
241 c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
241 c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
@@ -215,19 +215,19 b" if 'setuptools' in sys.modules:"
215 'ipython = IPython.frontend.terminal.ipapp:launch_new_instance',
215 'ipython = IPython.frontend.terminal.ipapp:launch_new_instance',
216 'ipython-qtconsole = IPython.frontend.qt.console.ipythonqt:main',
216 'ipython-qtconsole = IPython.frontend.qt.console.ipythonqt:main',
217 'pycolor = IPython.utils.PyColorize:main',
217 'pycolor = IPython.utils.PyColorize:main',
218 'ipcontroller = IPython.parallel.ipcontrollerapp:launch_new_instance',
218 'ipcontroller = IPython.parallel.apps.ipcontrollerapp:launch_new_instance',
219 'ipengine = IPython.parallel.ipengineapp:launch_new_instance',
219 'ipengine = IPython.parallel.apps.ipengineapp:launch_new_instance',
220 'iplogger = IPython.parallel.iploggerapp:launch_new_instance',
220 'iplogger = IPython.parallel.apps.iploggerapp:launch_new_instance',
221 'ipcluster = IPython.parallel.ipclusterapp:launch_new_instance',
221 'ipcluster = IPython.parallel.apps.ipclusterapp:launch_new_instance',
222 'iptest = IPython.testing.iptest:main',
222 'iptest = IPython.testing.iptest:main',
223 'irunner = IPython.lib.irunner:main'
223 'irunner = IPython.lib.irunner:main'
224 ]
224 ]
225 }
225 }
226 setup_args['extras_require'] = dict(
226 setup_args['extras_require'] = dict(
227 parallel = 'pyzmq>=2.1.4',
227 zmq = 'pyzmq>=2.0.10.1',
228 zmq = 'pyzmq>=2.0.10.1',
228 doc='Sphinx>=0.3',
229 doc='Sphinx>=0.3',
229 test='nose>=0.10.1',
230 test='nose>=0.10.1',
230 security='pyOpenSSL>=0.6'
231 )
231 )
232 else:
232 else:
233 # If we are running without setuptools, call this function which will
233 # If we are running without setuptools, call this function which will
@@ -127,7 +127,8 b' def find_packages():'
127 add_package(packages, 'frontend.qt.console', tests=True)
127 add_package(packages, 'frontend.qt.console', tests=True)
128 add_package(packages, 'frontend.terminal', tests=True)
128 add_package(packages, 'frontend.terminal', tests=True)
129 add_package(packages, 'lib', tests=True)
129 add_package(packages, 'lib', tests=True)
130 add_package(packages, 'parallel', tests=True)
130 add_package(packages, 'parallel', tests=True, scripts=True,
131 others=['apps','engine','client','controller'])
131 add_package(packages, 'quarantine', tests=True)
132 add_package(packages, 'quarantine', tests=True)
132 add_package(packages, 'scripts')
133 add_package(packages, 'scripts')
133 add_package(packages, 'testing', tests=True)
134 add_package(packages, 'testing', tests=True)
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed
NO CONTENT: file was removed