diff --git a/IPython/config/default/ipclusterz_config.py b/IPython/config/default/ipclusterz_config.py index 855bfbb..e76c886 100644 --- a/IPython/config/default/ipclusterz_config.py +++ b/IPython/config/default/ipclusterz_config.py @@ -23,8 +23,8 @@ c = get_config() # - PBSControllerLauncher # - SGEControllerLauncher # - WindowsHPCControllerLauncher -# c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.LocalControllerLauncher' -c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLauncher' +# c.Global.controller_launcher = 'IPython.parallel.launcher.LocalControllerLauncher' +c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher' # Options are: # - LocalEngineSetLauncher @@ -32,7 +32,7 @@ c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLaunc # - PBSEngineSetLauncher # - SGEEngineSetLauncher # - WindowsHPCEngineSetLauncher -# c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher' +# c.Global.engine_launcher = 'IPython.parallel.launcher.LocalEngineSetLauncher' #----------------------------------------------------------------------------- # Global configuration diff --git a/IPython/config/default/ipcontrollerz_config.py b/IPython/config/default/ipcontrollerz_config.py index 3cf437b..adf2878 100644 --- a/IPython/config/default/ipcontrollerz_config.py +++ b/IPython/config/default/ipcontrollerz_config.py @@ -89,7 +89,7 @@ c = get_config() # Which class to use for the db backend. Currently supported are DictDB (the # default), and MongoDB. Uncomment this line to enable MongoDB, which will # slow-down the Hub's responsiveness, but also reduce its memory footprint. -# c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB' +# c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB' # The heartbeat ping frequency. This is the frequency (in ms) at which the # Hub pings engines for heartbeats. 
This determines how quickly the Hub @@ -144,11 +144,11 @@ c = get_config() # ----- in-memory configuration -------- # this line restores the default behavior: in-memory storage of all results. -# c.HubFactory.db_class = 'IPython.zmq.parallel.dictdb.DictDB' +# c.HubFactory.db_class = 'IPython.parallel.dictdb.DictDB' # ----- sqlite configuration -------- # use this line to activate sqlite: -# c.HubFactory.db_class = 'IPython.zmq.parallel.sqlitedb.SQLiteDB' +# c.HubFactory.db_class = 'IPython.parallel.sqlitedb.SQLiteDB' # You can specify the name of the db-file. By default, this will be located # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db @@ -165,7 +165,7 @@ c = get_config() # ----- mongodb configuration -------- # use this line to activate mongodb: -# c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB' +# c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB' # You can specify the args and kwargs pymongo will use when creating the Connection. # For more information on what these options might be, see pymongo documentation. 
diff --git a/IPython/external/ssh/tunnel.py b/IPython/external/ssh/tunnel.py index 868b0a5..4fda9e6 100644 --- a/IPython/external/ssh/tunnel.py +++ b/IPython/external/ssh/tunnel.py @@ -34,7 +34,7 @@ try: except ImportError: pexpect = None -from IPython.zmq.parallel.entry_point import select_random_ports +from IPython.parallel.entry_point import select_random_ports #----------------------------------------------------------------------------- # Code diff --git a/IPython/zmq/parallel/__init__.py b/IPython/parallel/__init__.py similarity index 87% rename from IPython/zmq/parallel/__init__.py rename to IPython/parallel/__init__.py index 2dbf93d..c51b9d0 100644 --- a/IPython/zmq/parallel/__init__.py +++ b/IPython/parallel/__init__.py @@ -13,12 +13,13 @@ import zmq if zmq.__version__ < '2.1.3': - raise ImportError("IPython.zmq.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__) + raise ImportError("IPython.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__) from .asyncresult import * from .client import Client from .dependency import * from .remotefunction import * from .view import * +from IPython.utils.pickleutil import Reference diff --git a/IPython/zmq/parallel/asyncresult.py b/IPython/parallel/asyncresult.py similarity index 100% rename from IPython/zmq/parallel/asyncresult.py rename to IPython/parallel/asyncresult.py diff --git a/IPython/zmq/parallel/client.py b/IPython/parallel/client.py similarity index 96% rename from IPython/zmq/parallel/client.py rename to IPython/parallel/client.py index 6bddd96..714e744 100644 --- a/IPython/zmq/parallel/client.py +++ b/IPython/parallel/client.py @@ -24,7 +24,6 @@ import zmq # from zmq.eventloop import ioloop, zmqstream from IPython.utils.path import get_ipython_dir -from IPython.utils.pickleutil import Reference from IPython.utils.traitlets import (HasTraits, Int, Instance, CUnicode, Dict, List, Bool, Str, Set) from IPython.external.decorator import decorator @@ -33,10 
+32,8 @@ from IPython.external.ssh import tunnel from . import error from . import util from . import streamsession as ss -from .asyncresult import AsyncResult, AsyncMapResult, AsyncHubResult +from .asyncresult import AsyncResult, AsyncHubResult from .clusterdir import ClusterDir, ClusterDirError -from .dependency import Dependency, depend, require, dependent -from .remotefunction import remote, parallel, ParallelFunction, RemoteFunction from .view import DirectView, LoadBalancedView #-------------------------------------------------------------------------- @@ -985,7 +982,7 @@ class Client(HasTraits): targets: list,slice,int,etc. [default: use all engines] The subset of engines across which to load-balance """ - if targets is None: + if targets is not None: targets = self._build_targets(targets)[1] return LoadBalancedView(client=self, socket=self._task_socket, targets=targets) @@ -1278,16 +1275,4 @@ class Client(HasTraits): raise self._unwrap_exception(content) -__all__ = [ 'Client', - 'depend', - 'require', - 'remote', - 'parallel', - 'RemoteFunction', - 'ParallelFunction', - 'DirectView', - 'LoadBalancedView', - 'AsyncResult', - 'AsyncMapResult', - 'Reference' - ] +__all__ = [ 'Client' ] diff --git a/IPython/zmq/parallel/clusterdir.py b/IPython/parallel/clusterdir.py similarity index 100% rename from IPython/zmq/parallel/clusterdir.py rename to IPython/parallel/clusterdir.py diff --git a/IPython/zmq/parallel/controller.py b/IPython/parallel/controller.py similarity index 100% rename from IPython/zmq/parallel/controller.py rename to IPython/parallel/controller.py diff --git a/IPython/zmq/parallel/dependency.py b/IPython/parallel/dependency.py similarity index 99% rename from IPython/zmq/parallel/dependency.py rename to IPython/parallel/dependency.py index bbd60f3..b18ff1a 100644 --- a/IPython/zmq/parallel/dependency.py +++ b/IPython/parallel/dependency.py @@ -67,7 +67,7 @@ class dependent(object): @interactive def _require(*names): """Helper for @require 
decorator.""" - from IPython.zmq.parallel.error import UnmetDependency + from IPython.parallel.error import UnmetDependency user_ns = globals() for name in names: if name in user_ns: diff --git a/IPython/zmq/parallel/dictdb.py b/IPython/parallel/dictdb.py similarity index 100% rename from IPython/zmq/parallel/dictdb.py rename to IPython/parallel/dictdb.py diff --git a/IPython/zmq/parallel/engine.py b/IPython/parallel/engine.py similarity index 100% rename from IPython/zmq/parallel/engine.py rename to IPython/parallel/engine.py diff --git a/IPython/zmq/parallel/entry_point.py b/IPython/parallel/entry_point.py similarity index 100% rename from IPython/zmq/parallel/entry_point.py rename to IPython/parallel/entry_point.py diff --git a/IPython/zmq/parallel/error.py b/IPython/parallel/error.py similarity index 100% rename from IPython/zmq/parallel/error.py rename to IPython/parallel/error.py diff --git a/IPython/zmq/parallel/factory.py b/IPython/parallel/factory.py similarity index 94% rename from IPython/zmq/parallel/factory.py rename to IPython/parallel/factory.py index 9509fb3..7c9da9a 100644 --- a/IPython/zmq/parallel/factory.py +++ b/IPython/parallel/factory.py @@ -22,8 +22,8 @@ from IPython.config.configurable import Configurable from IPython.utils.importstring import import_item from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr -import IPython.zmq.parallel.streamsession as ss -from IPython.zmq.parallel.entry_point import select_random_ports +import IPython.parallel.streamsession as ss +from IPython.parallel.entry_point import select_random_ports #----------------------------------------------------------------------------- # Classes @@ -37,7 +37,7 @@ class LoggingFactory(Configurable): class SessionFactory(LoggingFactory): - """The Base factory from which every factory in IPython.zmq.parallel inherits""" + """The Base factory from which every factory in IPython.parallel inherits""" packer = Str('',config=True) unpacker = Str('',config=True) @@ 
-48,7 +48,7 @@ class SessionFactory(LoggingFactory): exec_key = CUnicode('',config=True) # not configurable: context = Instance('zmq.Context', (), {}) - session = Instance('IPython.zmq.parallel.streamsession.StreamSession') + session = Instance('IPython.parallel.streamsession.StreamSession') loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False) def _loop_default(self): return IOLoop.instance() diff --git a/IPython/zmq/parallel/heartmonitor.py b/IPython/parallel/heartmonitor.py similarity index 100% rename from IPython/zmq/parallel/heartmonitor.py rename to IPython/parallel/heartmonitor.py diff --git a/IPython/zmq/parallel/hub.py b/IPython/parallel/hub.py similarity index 98% rename from IPython/zmq/parallel/hub.py rename to IPython/parallel/hub.py index 27070e3..2610002 100755 --- a/IPython/zmq/parallel/hub.py +++ b/IPython/parallel/hub.py @@ -136,11 +136,11 @@ class HubFactory(RegistrationFactory): monitor_url = CStr('') - db_class = CStr('IPython.zmq.parallel.dictdb.DictDB', config=True) + db_class = CStr('IPython.parallel.dictdb.DictDB', config=True) # not configurable - db = Instance('IPython.zmq.parallel.dictdb.BaseDB') - heartmonitor = Instance('IPython.zmq.parallel.heartmonitor.HeartMonitor') + db = Instance('IPython.parallel.dictdb.BaseDB') + heartmonitor = Instance('IPython.parallel.heartmonitor.HeartMonitor') subconstructors = List() _constructed = Bool(False) diff --git a/IPython/zmq/parallel/ipcluster.py b/IPython/parallel/ipcluster.py similarity index 97% rename from IPython/zmq/parallel/ipcluster.py rename to IPython/parallel/ipcluster.py index 29dc2e5..1da18c8 100644 --- a/IPython/zmq/parallel/ipcluster.py +++ b/IPython/parallel/ipcluster.py @@ -56,7 +56,7 @@ def strip_args(flags, args=sys.argv[1:]): def launch_process(mod, args): """Launch a controller or engine in a subprocess.""" - code = "from IPython.zmq.parallel.%s import launch_new_instance;launch_new_instance()"%mod + code = "from IPython.parallel.%s import 
launch_new_instance;launch_new_instance()"%mod arguments = [ sys.executable, '-c', code ] + args blackholew = file(os.devnull, 'w') blackholer = file(os.devnull, 'r') diff --git a/IPython/zmq/parallel/ipclusterapp.py b/IPython/parallel/ipclusterapp.py similarity index 98% rename from IPython/zmq/parallel/ipclusterapp.py rename to IPython/parallel/ipclusterapp.py index 54571da..c57e788 100755 --- a/IPython/zmq/parallel/ipclusterapp.py +++ b/IPython/parallel/ipclusterapp.py @@ -26,7 +26,7 @@ from zmq.eventloop import ioloop from IPython.external.argparse import ArgumentParser, SUPPRESS from IPython.utils.importstring import import_item -from IPython.zmq.parallel.clusterdir import ( +from IPython.parallel.clusterdir import ( ApplicationWithClusterDir, ClusterDirConfigLoader, ClusterDirError, PIDFileError ) @@ -260,9 +260,9 @@ class IPClusterApp(ApplicationWithClusterDir): def create_default_config(self): super(IPClusterApp, self).create_default_config() self.default_config.Global.controller_launcher = \ - 'IPython.zmq.parallel.launcher.LocalControllerLauncher' + 'IPython.parallel.launcher.LocalControllerLauncher' self.default_config.Global.engine_launcher = \ - 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher' + 'IPython.parallel.launcher.LocalEngineSetLauncher' self.default_config.Global.n = 2 self.default_config.Global.delay = 2 self.default_config.Global.reset_config = False diff --git a/IPython/zmq/parallel/ipcontrollerapp.py b/IPython/parallel/ipcontrollerapp.py similarity index 97% rename from IPython/zmq/parallel/ipcontrollerapp.py rename to IPython/parallel/ipcontrollerapp.py index 897c838..705396a 100755 --- a/IPython/zmq/parallel/ipcontrollerapp.py +++ b/IPython/parallel/ipcontrollerapp.py @@ -30,13 +30,13 @@ from zmq.log.handlers import PUBHandler from zmq.utils import jsonapi as json from IPython.config.loader import Config -from IPython.zmq.parallel import factory -from IPython.zmq.parallel.controller import ControllerFactory -from 
IPython.zmq.parallel.clusterdir import ( +from IPython.parallel import factory +from IPython.parallel.controller import ControllerFactory +from IPython.parallel.clusterdir import ( ApplicationWithClusterDir, ClusterDirConfigLoader ) -from IPython.zmq.parallel.util import disambiguate_ip_address, split_url +from IPython.parallel.util import disambiguate_ip_address, split_url # from IPython.kernel.fcutil import FCServiceFactory, FURLError from IPython.utils.traitlets import Instance, Unicode @@ -117,11 +117,11 @@ class IPControllerAppConfigLoader(ClusterDirConfigLoader): ## Hub Config: paa('--mongodb', dest='HubFactory.db_class', action='store_const', - const='IPython.zmq.parallel.mongodb.MongoDB', + const='IPython.parallel.mongodb.MongoDB', help='Use MongoDB for task storage [default: in-memory]') paa('--sqlite', dest='HubFactory.db_class', action='store_const', - const='IPython.zmq.parallel.sqlitedb.SQLiteDB', + const='IPython.parallel.sqlitedb.SQLiteDB', help='Use SQLite3 for DB task storage [default: in-memory]') paa('--hb', type=int, dest='HubFactory.hb', nargs=2, diff --git a/IPython/zmq/parallel/ipengineapp.py b/IPython/parallel/ipengineapp.py similarity index 96% rename from IPython/zmq/parallel/ipengineapp.py rename to IPython/parallel/ipengineapp.py index 5278d4d..6cf0562 100755 --- a/IPython/zmq/parallel/ipengineapp.py +++ b/IPython/parallel/ipengineapp.py @@ -22,16 +22,16 @@ import sys import zmq from zmq.eventloop import ioloop -from IPython.zmq.parallel.clusterdir import ( +from IPython.parallel.clusterdir import ( ApplicationWithClusterDir, ClusterDirConfigLoader ) from IPython.zmq.log import EnginePUBHandler -from IPython.zmq.parallel import factory -from IPython.zmq.parallel.engine import EngineFactory -from IPython.zmq.parallel.streamkernel import Kernel -from IPython.zmq.parallel.util import disambiguate_url +from IPython.parallel import factory +from IPython.parallel.engine import EngineFactory +from IPython.parallel.streamkernel import Kernel 
+from IPython.parallel.util import disambiguate_url from IPython.utils.importstring import import_item diff --git a/IPython/zmq/parallel/iploggerapp.py b/IPython/parallel/iploggerapp.py similarity index 99% rename from IPython/zmq/parallel/iploggerapp.py rename to IPython/parallel/iploggerapp.py index 6453fd8..816bf04 100755 --- a/IPython/zmq/parallel/iploggerapp.py +++ b/IPython/parallel/iploggerapp.py @@ -20,7 +20,7 @@ import sys import zmq -from IPython.zmq.parallel.clusterdir import ( +from IPython.parallel.clusterdir import ( ApplicationWithClusterDir, ClusterDirConfigLoader ) diff --git a/IPython/zmq/parallel/kernelstarter.py b/IPython/parallel/kernelstarter.py similarity index 100% rename from IPython/zmq/parallel/kernelstarter.py rename to IPython/parallel/kernelstarter.py diff --git a/IPython/zmq/parallel/launcher.py b/IPython/parallel/launcher.py similarity index 98% rename from IPython/zmq/parallel/launcher.py rename to IPython/parallel/launcher.py index c6f591e..13c3c78 100644 --- a/IPython/zmq/parallel/launcher.py +++ b/IPython/parallel/launcher.py @@ -64,15 +64,15 @@ except ImportError: ipclusterz_cmd_argv = pycmd2argv(get_ipython_module_path( - 'IPython.zmq.parallel.ipclusterapp' + 'IPython.parallel.ipclusterapp' )) ipenginez_cmd_argv = pycmd2argv(get_ipython_module_path( - 'IPython.zmq.parallel.ipengineapp' + 'IPython.parallel.ipengineapp' )) ipcontrollerz_cmd_argv = pycmd2argv(get_ipython_module_path( - 'IPython.zmq.parallel.ipcontrollerapp' + 'IPython.parallel.ipcontrollerapp' )) #----------------------------------------------------------------------------- diff --git a/IPython/zmq/parallel/logwatcher.py b/IPython/parallel/logwatcher.py similarity index 100% rename from IPython/zmq/parallel/logwatcher.py rename to IPython/parallel/logwatcher.py diff --git a/IPython/zmq/parallel/map.py b/IPython/parallel/map.py similarity index 100% rename from IPython/zmq/parallel/map.py rename to IPython/parallel/map.py diff --git 
a/IPython/zmq/parallel/mongodb.py b/IPython/parallel/mongodb.py similarity index 100% rename from IPython/zmq/parallel/mongodb.py rename to IPython/parallel/mongodb.py diff --git a/IPython/zmq/parallel/remotefunction.py b/IPython/parallel/remotefunction.py similarity index 100% rename from IPython/zmq/parallel/remotefunction.py rename to IPython/parallel/remotefunction.py diff --git a/IPython/zmq/parallel/remotenamespace.py b/IPython/parallel/remotenamespace.py similarity index 100% rename from IPython/zmq/parallel/remotenamespace.py rename to IPython/parallel/remotenamespace.py diff --git a/IPython/zmq/parallel/scheduler.py b/IPython/parallel/scheduler.py similarity index 100% rename from IPython/zmq/parallel/scheduler.py rename to IPython/parallel/scheduler.py diff --git a/IPython/zmq/parallel/scripts/__init__.py b/IPython/parallel/scripts/__init__.py similarity index 100% rename from IPython/zmq/parallel/scripts/__init__.py rename to IPython/parallel/scripts/__init__.py diff --git a/IPython/zmq/parallel/scripts/ipclusterz b/IPython/parallel/scripts/ipclusterz similarity index 92% rename from IPython/zmq/parallel/scripts/ipclusterz rename to IPython/parallel/scripts/ipclusterz index 6e4b1f1..743a510 100755 --- a/IPython/zmq/parallel/scripts/ipclusterz +++ b/IPython/parallel/scripts/ipclusterz @@ -13,6 +13,6 @@ #----------------------------------------------------------------------------- -from IPython.zmq.parallel.ipclusterapp import launch_new_instance +from IPython.parallel.ipclusterapp import launch_new_instance launch_new_instance() diff --git a/IPython/zmq/parallel/scripts/ipcontrollerz b/IPython/parallel/scripts/ipcontrollerz similarity index 92% rename from IPython/zmq/parallel/scripts/ipcontrollerz rename to IPython/parallel/scripts/ipcontrollerz index 74aeaf5..1556dff 100755 --- a/IPython/zmq/parallel/scripts/ipcontrollerz +++ b/IPython/parallel/scripts/ipcontrollerz @@ -13,6 +13,6 @@ 
#----------------------------------------------------------------------------- -from IPython.zmq.parallel.ipcontrollerapp import launch_new_instance +from IPython.parallel.ipcontrollerapp import launch_new_instance launch_new_instance() diff --git a/IPython/zmq/parallel/scripts/ipenginez b/IPython/parallel/scripts/ipenginez similarity index 92% rename from IPython/zmq/parallel/scripts/ipenginez rename to IPython/parallel/scripts/ipenginez index 515347f..56f4649 100755 --- a/IPython/zmq/parallel/scripts/ipenginez +++ b/IPython/parallel/scripts/ipenginez @@ -13,7 +13,7 @@ #----------------------------------------------------------------------------- -from IPython.zmq.parallel.ipengineapp import launch_new_instance +from IPython.parallel.ipengineapp import launch_new_instance launch_new_instance() diff --git a/IPython/zmq/parallel/scripts/iploggerz b/IPython/parallel/scripts/iploggerz similarity index 92% rename from IPython/zmq/parallel/scripts/iploggerz rename to IPython/parallel/scripts/iploggerz index 32e6eed..97d1fa3 100755 --- a/IPython/zmq/parallel/scripts/iploggerz +++ b/IPython/parallel/scripts/iploggerz @@ -13,7 +13,7 @@ #----------------------------------------------------------------------------- -from IPython.zmq.parallel.iploggerapp import launch_new_instance +from IPython.parallel.iploggerapp import launch_new_instance launch_new_instance() diff --git a/IPython/zmq/parallel/sqlitedb.py b/IPython/parallel/sqlitedb.py similarity index 98% rename from IPython/zmq/parallel/sqlitedb.py rename to IPython/parallel/sqlitedb.py index e8071d5..7de6341 100644 --- a/IPython/zmq/parallel/sqlitedb.py +++ b/IPython/parallel/sqlitedb.py @@ -133,7 +133,8 @@ class SQLiteDB(BaseDB): sqlite3.register_converter('bufs', _convert_bufs) # connect to the db dbfile = os.path.join(self.location, self.filename) - self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES) + self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES, cached_statements=16) 
+ # print dir(self._db) self._db.execute("""CREATE TABLE IF NOT EXISTS %s (msg_id text PRIMARY KEY, diff --git a/IPython/zmq/parallel/streamkernel.py b/IPython/parallel/streamkernel.py similarity index 99% rename from IPython/zmq/parallel/streamkernel.py rename to IPython/parallel/streamkernel.py index 155009a..9f7a5c9 100755 --- a/IPython/zmq/parallel/streamkernel.py +++ b/IPython/parallel/streamkernel.py @@ -71,7 +71,7 @@ class Kernel(SessionFactory): control_stream = Instance(zmqstream.ZMQStream) task_stream = Instance(zmqstream.ZMQStream) iopub_stream = Instance(zmqstream.ZMQStream) - client = Instance('IPython.zmq.parallel.client.Client') + client = Instance('IPython.parallel.client.Client') # internals shell_streams = List() diff --git a/IPython/zmq/parallel/streamsession.py b/IPython/parallel/streamsession.py similarity index 100% rename from IPython/zmq/parallel/streamsession.py rename to IPython/parallel/streamsession.py diff --git a/IPython/zmq/parallel/taskthread.py b/IPython/parallel/taskthread.py similarity index 98% rename from IPython/zmq/parallel/taskthread.py rename to IPython/parallel/taskthread.py index eb845a7..2a80701 100644 --- a/IPython/zmq/parallel/taskthread.py +++ b/IPython/parallel/taskthread.py @@ -18,7 +18,7 @@ except: import zmq from zmq.core.poll import _poll as poll from zmq.devices import ThreadDevice -from IPython.zmq.parallel import streamsession as ss +from IPython.parallel import streamsession as ss class QueueStream(object): diff --git a/IPython/zmq/parallel/tests/__init__.py b/IPython/parallel/tests/__init__.py similarity index 98% rename from IPython/zmq/parallel/tests/__init__.py rename to IPython/parallel/tests/__init__.py index 680736e..c8e4a26 100644 --- a/IPython/zmq/parallel/tests/__init__.py +++ b/IPython/parallel/tests/__init__.py @@ -15,7 +15,7 @@ import tempfile import time from subprocess import Popen, PIPE, STDOUT -from IPython.zmq.parallel import client +from IPython.parallel import client processes = [] 
blackhole = tempfile.TemporaryFile() diff --git a/IPython/zmq/parallel/tests/clienttest.py b/IPython/parallel/tests/clienttest.py similarity index 94% rename from IPython/zmq/parallel/tests/clienttest.py rename to IPython/parallel/tests/clienttest.py index bd8a87f..bbfb68e 100644 --- a/IPython/zmq/parallel/tests/clienttest.py +++ b/IPython/parallel/tests/clienttest.py @@ -20,11 +20,11 @@ from zmq.tests import BaseZMQTestCase from IPython.external.decorator import decorator -from IPython.zmq.parallel import error -from IPython.zmq.parallel.client import Client -from IPython.zmq.parallel.ipcluster import launch_process -from IPython.zmq.parallel.entry_point import select_random_ports -from IPython.zmq.parallel.tests import processes,add_engines +from IPython.parallel import error +from IPython.parallel.client import Client +from IPython.parallel.ipcluster import launch_process +from IPython.parallel.entry_point import select_random_ports +from IPython.parallel.tests import processes,add_engines # simple tasks for use in apply tests diff --git a/IPython/zmq/parallel/tests/test_asyncresult.py b/IPython/parallel/tests/test_asyncresult.py similarity index 96% rename from IPython/zmq/parallel/tests/test_asyncresult.py rename to IPython/parallel/tests/test_asyncresult.py index dfe9230..e3ef072 100644 --- a/IPython/zmq/parallel/tests/test_asyncresult.py +++ b/IPython/parallel/tests/test_asyncresult.py @@ -12,9 +12,9 @@ #------------------------------------------------------------------------------- -from IPython.zmq.parallel.error import TimeoutError +from IPython.parallel.error import TimeoutError -from IPython.zmq.parallel.tests import add_engines +from IPython.parallel.tests import add_engines from .clienttest import ClusterTestCase def setup(): diff --git a/IPython/zmq/parallel/tests/test_client.py b/IPython/parallel/tests/test_client.py similarity index 92% rename from IPython/zmq/parallel/tests/test_client.py rename to IPython/parallel/tests/test_client.py index 
9a14c3c..e3a213a 100644 --- a/IPython/zmq/parallel/tests/test_client.py +++ b/IPython/parallel/tests/test_client.py @@ -16,10 +16,10 @@ from tempfile import mktemp import zmq -from IPython.zmq.parallel import client as clientmod -from IPython.zmq.parallel import error -from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult -from IPython.zmq.parallel.view import LoadBalancedView, DirectView +from IPython.parallel import client as clientmod +from IPython.parallel import error +from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult +from IPython.parallel.view import LoadBalancedView, DirectView from clienttest import ClusterTestCase, segfault, wait, add_engines @@ -61,6 +61,15 @@ class TestClient(ClusterTestCase): self.assertEquals(v.targets, targets[-1]) self.assertRaises(TypeError, lambda : self.client[None]) + def test_lbview_targets(self): + """test load_balanced_view targets""" + v = self.client.load_balanced_view() + self.assertEquals(v.targets, None) + v = self.client.load_balanced_view(-1) + self.assertEquals(v.targets, [self.client.ids[-1]]) + v = self.client.load_balanced_view('all') + self.assertEquals(v.targets, self.client.ids) + def test_targets(self): """test various valid targets arguments""" build = self.client._build_targets diff --git a/IPython/zmq/parallel/tests/test_dependency.py b/IPython/parallel/tests/test_dependency.py similarity index 96% rename from IPython/zmq/parallel/tests/test_dependency.py rename to IPython/parallel/tests/test_dependency.py index e67d37e..9773e93 100644 --- a/IPython/zmq/parallel/tests/test_dependency.py +++ b/IPython/parallel/tests/test_dependency.py @@ -18,10 +18,10 @@ import os from IPython.utils.pickleutil import can, uncan -from IPython.zmq.parallel import dependency as dmod -from IPython.zmq.parallel.util import interactive +from IPython.parallel import dependency as dmod +from IPython.parallel.util import interactive -from IPython.zmq.parallel.tests import add_engines +from 
IPython.parallel.tests import add_engines from .clienttest import ClusterTestCase def setup(): diff --git a/IPython/zmq/parallel/tests/test_newserialized.py b/IPython/parallel/tests/test_newserialized.py similarity index 98% rename from IPython/zmq/parallel/tests/test_newserialized.py rename to IPython/parallel/tests/test_newserialized.py index 8fd0bb7..e57533f 100644 --- a/IPython/zmq/parallel/tests/test_newserialized.py +++ b/IPython/parallel/tests/test_newserialized.py @@ -16,7 +16,7 @@ from unittest import TestCase from IPython.testing.parametric import parametric from IPython.utils import newserialized as ns from IPython.utils.pickleutil import can, uncan, CannedObject, CannedFunction -from IPython.zmq.parallel.tests.clienttest import skip_without +from IPython.parallel.tests.clienttest import skip_without class CanningTestCase(TestCase): diff --git a/IPython/zmq/parallel/tests/test_streamsession.py b/IPython/parallel/tests/test_streamsession.py similarity index 98% rename from IPython/zmq/parallel/tests/test_streamsession.py rename to IPython/parallel/tests/test_streamsession.py index 562d8bc..891afe0 100644 --- a/IPython/zmq/parallel/tests/test_streamsession.py +++ b/IPython/parallel/tests/test_streamsession.py @@ -18,7 +18,7 @@ import zmq from zmq.tests import BaseZMQTestCase from zmq.eventloop.zmqstream import ZMQStream # from IPython.zmq.tests import SessionTestCase -from IPython.zmq.parallel import streamsession as ss +from IPython.parallel import streamsession as ss class SessionTestCase(BaseZMQTestCase): diff --git a/IPython/zmq/parallel/tests/test_view.py b/IPython/parallel/tests/test_view.py similarity index 92% rename from IPython/zmq/parallel/tests/test_view.py rename to IPython/parallel/tests/test_view.py index 91a9820..50781ba 100644 --- a/IPython/zmq/parallel/tests/test_view.py +++ b/IPython/parallel/tests/test_view.py @@ -15,13 +15,13 @@ from tempfile import mktemp import zmq -from IPython.zmq.parallel import client as clientmod -from 
IPython.zmq.parallel import error -from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult -from IPython.zmq.parallel.view import LoadBalancedView, DirectView -from IPython.zmq.parallel.util import interactive +from IPython import parallel as pmod +from IPython.parallel import error +from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult +from IPython.parallel.view import LoadBalancedView, DirectView +from IPython.parallel.util import interactive -from IPython.zmq.parallel.tests import add_engines +from IPython.parallel.tests import add_engines from .clienttest import ClusterTestCase, segfault, wait, skip_without @@ -129,7 +129,7 @@ class TestView(ClusterTestCase): def test_get_result(self): """test getting results from the Hub.""" - c = clientmod.Client(profile='iptest') + c = pmod.Client(profile='iptest') # self.add_engines(1) t = c.ids[-1] v = c[t] @@ -154,7 +154,7 @@ class TestView(ClusterTestCase): """) v = self.client[-1] v.run(tmpfile, block=True) - self.assertEquals(v.apply_sync(lambda f: f(), clientmod.Reference('g')), 5) + self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5) def test_apply_tracked(self): """test tracking for apply""" @@ -206,7 +206,7 @@ class TestView(ClusterTestCase): def test_remote_reference(self): v = self.client[-1] v['a'] = 123 - ra = clientmod.Reference('a') + ra = pmod.Reference('a') b = v.apply_sync(lambda x: x, ra) self.assertEquals(b, 123) diff --git a/IPython/zmq/parallel/util.py b/IPython/parallel/util.py similarity index 100% rename from IPython/zmq/parallel/util.py rename to IPython/parallel/util.py diff --git a/IPython/zmq/parallel/view.py b/IPython/parallel/view.py similarity index 97% rename from IPython/zmq/parallel/view.py rename to IPython/parallel/view.py index c198062..09cafce 100644 --- a/IPython/zmq/parallel/view.py +++ b/IPython/parallel/view.py @@ -105,7 +105,7 @@ class View(HasTraits): history=List() outstanding = Set() results = 
Dict() - client = Instance('IPython.zmq.parallel.client.Client') + client = Instance('IPython.parallel.client.Client') _socket = Instance('zmq.Socket') _flag_names = List(['targets', 'block', 'track']) diff --git a/IPython/parallel/winhpcjob.py b/IPython/parallel/winhpcjob.py new file mode 100644 index 0000000..c9219e2 --- /dev/null +++ b/IPython/parallel/winhpcjob.py @@ -0,0 +1,316 @@ +#!/usr/bin/env python +# encoding: utf-8 +""" +Job and task components for writing .xml files that the Windows HPC Server +2008 can use to start jobs. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2009 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from __future__ import with_statement + +import os +import re +import uuid + +from xml.etree import ElementTree as ET + +from IPython.config.configurable import Configurable +from IPython.utils.traitlets import ( + Str, Int, List, Instance, + Enum, Bool, CStr +) + +#----------------------------------------------------------------------------- +# Job and Task classes +#----------------------------------------------------------------------------- + + +def as_str(value): + if isinstance(value, str): + return value + elif isinstance(value, bool): + if value: + return 'true' + else: + return 'false' + elif isinstance(value, (int, float)): + return repr(value) + else: + return value + + +def indent(elem, level=0): + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, 
level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +def find_username(): + domain = os.environ.get('USERDOMAIN') + username = os.environ.get('USERNAME','') + if domain is None: + return username + else: + return '%s\\%s' % (domain, username) + + +class WinHPCJob(Configurable): + + job_id = Str('') + job_name = Str('MyJob', config=True) + min_cores = Int(1, config=True) + max_cores = Int(1, config=True) + min_sockets = Int(1, config=True) + max_sockets = Int(1, config=True) + min_nodes = Int(1, config=True) + max_nodes = Int(1, config=True) + unit_type = Str("Core", config=True) + auto_calculate_min = Bool(True, config=True) + auto_calculate_max = Bool(True, config=True) + run_until_canceled = Bool(False, config=True) + is_exclusive = Bool(False, config=True) + username = Str(find_username(), config=True) + job_type = Str('Batch', config=True) + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'), + default_value='Highest', config=True) + requested_nodes = Str('', config=True) + project = Str('IPython', config=True) + xmlns = Str('http://schemas.microsoft.com/HPCS2008/scheduler/') + version = Str("2.000") + tasks = List([]) + + @property + def owner(self): + return self.username + + def _write_attr(self, root, attr, key): + s = as_str(getattr(self, attr, '')) + if s: + root.set(key, s) + + def as_element(self): + # We have to add _A_ type things to get the right order that + # the MSFT XML parser expects. 
+ root = ET.Element('Job') + self._write_attr(root, 'version', '_A_Version') + self._write_attr(root, 'job_name', '_B_Name') + self._write_attr(root, 'unit_type', '_C_UnitType') + self._write_attr(root, 'min_cores', '_D_MinCores') + self._write_attr(root, 'max_cores', '_E_MaxCores') + self._write_attr(root, 'min_sockets', '_F_MinSockets') + self._write_attr(root, 'max_sockets', '_G_MaxSockets') + self._write_attr(root, 'min_nodes', '_H_MinNodes') + self._write_attr(root, 'max_nodes', '_I_MaxNodes') + self._write_attr(root, 'run_until_canceled', '_J_RunUntilCanceled') + self._write_attr(root, 'is_exclusive', '_K_IsExclusive') + self._write_attr(root, 'username', '_L_UserName') + self._write_attr(root, 'job_type', '_M_JobType') + self._write_attr(root, 'priority', '_N_Priority') + self._write_attr(root, 'requested_nodes', '_O_RequestedNodes') + self._write_attr(root, 'auto_calculate_max', '_P_AutoCalculateMax') + self._write_attr(root, 'auto_calculate_min', '_Q_AutoCalculateMin') + self._write_attr(root, 'project', '_R_Project') + self._write_attr(root, 'owner', '_S_Owner') + self._write_attr(root, 'xmlns', '_T_xmlns') + dependencies = ET.SubElement(root, "Dependencies") + etasks = ET.SubElement(root, "Tasks") + for t in self.tasks: + etasks.append(t.as_element()) + return root + + def tostring(self): + """Return the string representation of the job description XML.""" + root = self.as_element() + indent(root) + txt = ET.tostring(root, encoding="utf-8") + # Now remove the tokens used to order the attributes. + txt = re.sub(r'_[A-Z]_','',txt) + txt = '\n' + txt + return txt + + def write(self, filename): + """Write the XML job description to a file.""" + txt = self.tostring() + with open(filename, 'w') as f: + f.write(txt) + + def add_task(self, task): + """Add a task to the job. + + Parameters + ---------- + task : :class:`WinHPCTask` + The task object to add. 
+ """ + self.tasks.append(task) + + +class WinHPCTask(Configurable): + + task_id = Str('') + task_name = Str('') + version = Str("2.000") + min_cores = Int(1, config=True) + max_cores = Int(1, config=True) + min_sockets = Int(1, config=True) + max_sockets = Int(1, config=True) + min_nodes = Int(1, config=True) + max_nodes = Int(1, config=True) + unit_type = Str("Core", config=True) + command_line = CStr('', config=True) + work_directory = CStr('', config=True) + is_rerunnaable = Bool(True, config=True) + std_out_file_path = CStr('', config=True) + std_err_file_path = CStr('', config=True) + is_parametric = Bool(False, config=True) + environment_variables = Instance(dict, args=(), config=True) + + def _write_attr(self, root, attr, key): + s = as_str(getattr(self, attr, '')) + if s: + root.set(key, s) + + def as_element(self): + root = ET.Element('Task') + self._write_attr(root, 'version', '_A_Version') + self._write_attr(root, 'task_name', '_B_Name') + self._write_attr(root, 'min_cores', '_C_MinCores') + self._write_attr(root, 'max_cores', '_D_MaxCores') + self._write_attr(root, 'min_sockets', '_E_MinSockets') + self._write_attr(root, 'max_sockets', '_F_MaxSockets') + self._write_attr(root, 'min_nodes', '_G_MinNodes') + self._write_attr(root, 'max_nodes', '_H_MaxNodes') + self._write_attr(root, 'command_line', '_I_CommandLine') + self._write_attr(root, 'work_directory', '_J_WorkDirectory') + self._write_attr(root, 'is_rerunnaable', '_K_IsRerunnable') + self._write_attr(root, 'std_out_file_path', '_L_StdOutFilePath') + self._write_attr(root, 'std_err_file_path', '_M_StdErrFilePath') + self._write_attr(root, 'is_parametric', '_N_IsParametric') + self._write_attr(root, 'unit_type', '_O_UnitType') + root.append(self.get_env_vars()) + return root + + def get_env_vars(self): + env_vars = ET.Element('EnvironmentVariables') + for k, v in self.environment_variables.iteritems(): + variable = ET.SubElement(env_vars, "Variable") + name = ET.SubElement(variable, "Name") + 
name.text = k + value = ET.SubElement(variable, "Value") + value.text = v + return env_vars + + + +# By declaring these, we can configure the controller and engine separately! + +class IPControllerJob(WinHPCJob): + job_name = Str('IPController', config=False) + is_exclusive = Bool(False, config=True) + username = Str(find_username(), config=True) + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'), + default_value='Highest', config=True) + requested_nodes = Str('', config=True) + project = Str('IPython', config=True) + + +class IPEngineSetJob(WinHPCJob): + job_name = Str('IPEngineSet', config=False) + is_exclusive = Bool(False, config=True) + username = Str(find_username(), config=True) + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'), + default_value='Highest', config=True) + requested_nodes = Str('', config=True) + project = Str('IPython', config=True) + + +class IPControllerTask(WinHPCTask): + + task_name = Str('IPController', config=True) + controller_cmd = List(['ipcontroller.exe'], config=True) + controller_args = List(['--log-to-file', '--log-level', '40'], config=True) + # I don't want these to be configurable + std_out_file_path = CStr('', config=False) + std_err_file_path = CStr('', config=False) + min_cores = Int(1, config=False) + max_cores = Int(1, config=False) + min_sockets = Int(1, config=False) + max_sockets = Int(1, config=False) + min_nodes = Int(1, config=False) + max_nodes = Int(1, config=False) + unit_type = Str("Core", config=False) + work_directory = CStr('', config=False) + + def __init__(self, config=None): + super(IPControllerTask, self).__init__(config=config) + the_uuid = uuid.uuid1() + self.std_out_file_path = os.path.join('log','ipcontroller-%s.out' % the_uuid) + self.std_err_file_path = os.path.join('log','ipcontroller-%s.err' % the_uuid) + + @property + def command_line(self): + return ' '.join(self.controller_cmd + self.controller_args) + + +class IPEngineTask(WinHPCTask): + + 
task_name = Str('IPEngine', config=True) + engine_cmd = List(['ipengine.exe'], config=True) + engine_args = List(['--log-to-file', '--log-level', '40'], config=True) + # I don't want these to be configurable + std_out_file_path = CStr('', config=False) + std_err_file_path = CStr('', config=False) + min_cores = Int(1, config=False) + max_cores = Int(1, config=False) + min_sockets = Int(1, config=False) + max_sockets = Int(1, config=False) + min_nodes = Int(1, config=False) + max_nodes = Int(1, config=False) + unit_type = Str("Core", config=False) + work_directory = CStr('', config=False) + + def __init__(self, config=None): + super(IPEngineTask,self).__init__(config=config) + the_uuid = uuid.uuid1() + self.std_out_file_path = os.path.join('log','ipengine-%s.out' % the_uuid) + self.std_err_file_path = os.path.join('log','ipengine-%s.err' % the_uuid) + + @property + def command_line(self): + return ' '.join(self.engine_cmd + self.engine_args) + + +# j = WinHPCJob(None) +# j.job_name = 'IPCluster' +# j.username = 'GNET\\bgranger' +# j.requested_nodes = 'GREEN' +# +# t = WinHPCTask(None) +# t.task_name = 'Controller' +# t.command_line = r"\\blue\domainusers$\bgranger\Python\Python25\Scripts\ipcontroller.exe --log-to-file -p default --log-level 10" +# t.work_directory = r"\\blue\domainusers$\bgranger\.ipython\cluster_default" +# t.std_out_file_path = 'controller-out.txt' +# t.std_err_file_path = 'controller-err.txt' +# t.environment_variables['PYTHONPATH'] = r"\\blue\domainusers$\bgranger\Python\Python25\Lib\site-packages" +# j.add_task(t) + diff --git a/IPython/utils/pickleutil.py b/IPython/utils/pickleutil.py index 669df9d..e09b73a 100644 --- a/IPython/utils/pickleutil.py +++ b/IPython/utils/pickleutil.py @@ -93,7 +93,7 @@ class CannedFunction(CannedObject): def can(obj): # import here to prevent module-level circular imports - from IPython.zmq.parallel.dependency import dependent + from IPython.parallel.dependency import dependent if isinstance(obj, dependent): keys = 
('f','df') return CannedObject(obj, keys=keys) diff --git a/docs/examples/newparallel/dagdeps.py b/docs/examples/newparallel/dagdeps.py index d65ec66..edc6cb9 100644 --- a/docs/examples/newparallel/dagdeps.py +++ b/docs/examples/newparallel/dagdeps.py @@ -8,7 +8,7 @@ Authors """ import networkx as nx from random import randint, random -from IPython.zmq.parallel import client as cmod +from IPython import parallel def randomwait(): import time @@ -87,7 +87,7 @@ def main(nodes, edges): for node in G: jobs[node] = randomwait - client = cmod.Client() + client = parallel.Client() view = client.load_balanced_view() print "submitting %i tasks with %i dependencies"%(nodes,edges) results = submit_jobs(view, G, jobs) diff --git a/docs/examples/newparallel/demo/dependencies.py b/docs/examples/newparallel/demo/dependencies.py index fd43003..3cf1566 100644 --- a/docs/examples/newparallel/demo/dependencies.py +++ b/docs/examples/newparallel/demo/dependencies.py @@ -1,6 +1,4 @@ -from IPython.zmq.parallel import error -from IPython.zmq.parallel.dependency import Dependency -from IPython.zmq.parallel.client import * +from IPython.parallel import * client = Client() diff --git a/docs/examples/newparallel/demo/map.py b/docs/examples/newparallel/demo/map.py index 50d6af1..d0316d1 100644 --- a/docs/examples/newparallel/demo/map.py +++ b/docs/examples/newparallel/demo/map.py @@ -1,4 +1,4 @@ -from IPython.zmq.parallel.client import * +from IPython.parallel import * client = Client() view = client[:] diff --git a/docs/examples/newparallel/demo/throughput.py b/docs/examples/newparallel/demo/throughput.py index 57a85f3..b9eaa08 100644 --- a/docs/examples/newparallel/demo/throughput.py +++ b/docs/examples/newparallel/demo/throughput.py @@ -1,6 +1,6 @@ import time import numpy as np -from IPython.zmq.parallel import client as clientmod +from IPython import parallel nlist = map(int, np.logspace(2,9,16,base=2)) nlist2 = map(int, np.logspace(2,8,15,base=2)) @@ -14,7 +14,7 @@ def echo(s=''): 
return s def time_throughput(nmessages, t=0, f=wait): - client = clientmod.Client() + client = parallel.Client() view = client[None] # do one ping before starting timing if f is echo: diff --git a/docs/examples/newparallel/demo/views.py b/docs/examples/newparallel/demo/views.py index 891a5a6..78f857f 100644 --- a/docs/examples/newparallel/demo/views.py +++ b/docs/examples/newparallel/demo/views.py @@ -1,4 +1,4 @@ -from IPython.zmq.parallel.client import * +from IPython.parallel import * client = Client() diff --git a/docs/examples/newparallel/fetchparse.py b/docs/examples/newparallel/fetchparse.py index 0691cc1..0d2d162 100644 --- a/docs/examples/newparallel/fetchparse.py +++ b/docs/examples/newparallel/fetchparse.py @@ -11,7 +11,7 @@ and some engines using something like:: ipclusterz start -n 4 """ import sys -from IPython.zmq.parallel import client, error +from IPython.parallel import Client, error import time import BeautifulSoup # this isn't necessary, but it helps throw the dependency error earlier @@ -39,7 +39,7 @@ class DistributedSpider(object): pollingDelay = 0.5 def __init__(self, site): - self.client = client.Client() + self.client = Client() self.view = self.client.load_balanced_view() self.mux = self.client[:] diff --git a/docs/examples/newparallel/helloworld.py b/docs/examples/newparallel/helloworld.py index 6e578e7..8b29f4d 100644 --- a/docs/examples/newparallel/helloworld.py +++ b/docs/examples/newparallel/helloworld.py @@ -2,9 +2,9 @@ A Distributed Hello world Ken Kinder """ -from IPython.zmq.parallel import client +from IPython.parallel import Client -rc = client.Client() +rc = Client() def sleep_and_echo(t, msg): import time diff --git a/docs/examples/newparallel/interengine/communicator.py b/docs/examples/newparallel/interengine/communicator.py index 48deea8..959e30f 100644 --- a/docs/examples/newparallel/interengine/communicator.py +++ b/docs/examples/newparallel/interengine/communicator.py @@ -3,7 +3,7 @@ import socket import uuid import zmq 
-from IPython.zmq.parallel.util import disambiguate_url +from IPython.parallel.util import disambiguate_url class EngineCommunicator(object): diff --git a/docs/examples/newparallel/interengine/interengine.py b/docs/examples/newparallel/interengine/interengine.py index 5e80426..865c802 100644 --- a/docs/examples/newparallel/interengine/interengine.py +++ b/docs/examples/newparallel/interengine/interengine.py @@ -1,9 +1,9 @@ import sys -from IPython.zmq.parallel import client +from IPython.parallel import Client -rc = client.Client() +rc = Client() rc.block=True view = rc[:] view.run('communicator.py') diff --git a/docs/examples/newparallel/mcdriver.py b/docs/examples/newparallel/mcdriver.py index 4a53150..c0e1abd 100644 --- a/docs/examples/newparallel/mcdriver.py +++ b/docs/examples/newparallel/mcdriver.py @@ -7,7 +7,7 @@ import sys import time -from IPython.zmq.parallel import client +from IPython.parallel import Client import numpy as np from mcpricer import price_options from matplotlib import pyplot as plt @@ -45,7 +45,7 @@ sigma_vals = np.linspace(min_sigma, max_sigma, n_sigmas) # The Client is used to setup the calculation and works with all # engines. -c = client.Client(profile=cluster_profile) +c = Client(profile=cluster_profile) # A LoadBalancedView is an interface to the engines that provides dynamic load # balancing at the expense of not knowing which engine will execute the code. diff --git a/docs/examples/newparallel/parallelpi.py b/docs/examples/newparallel/parallelpi.py index e59dc8a..dbdc770 100644 --- a/docs/examples/newparallel/parallelpi.py +++ b/docs/examples/newparallel/parallelpi.py @@ -16,7 +16,7 @@ and the files used will be downloaded if they are not in the working directory of the IPython engines. 
""" -from IPython.zmq.parallel import client +from IPython.parallel import Client from matplotlib import pyplot as plt import numpy as np from pidigits import * @@ -27,7 +27,7 @@ filestring = 'pi200m.ascii.%(i)02dof20' files = [filestring % {'i':i} for i in range(1,16)] # Connect to the IPython cluster -c = client.Client() +c = Client() c[:].run('pidigits.py') # the number of engines diff --git a/docs/examples/newparallel/wave2D/communicator.py b/docs/examples/newparallel/wave2D/communicator.py index de01bdb..4abb2eb 100644 --- a/docs/examples/newparallel/wave2D/communicator.py +++ b/docs/examples/newparallel/wave2D/communicator.py @@ -5,7 +5,7 @@ import socket import zmq -from IPython.zmq.parallel.util import disambiguate_url +from IPython.parallel.util import disambiguate_url class EngineCommunicator(object): """An object that connects Engines to each other. diff --git a/docs/examples/newparallel/wave2D/parallelwave-mpi.py b/docs/examples/newparallel/wave2D/parallelwave-mpi.py index 15dcbdc..1a0f793 100755 --- a/docs/examples/newparallel/wave2D/parallelwave-mpi.py +++ b/docs/examples/newparallel/wave2D/parallelwave-mpi.py @@ -28,7 +28,7 @@ import time from numpy import exp, zeros, newaxis, sqrt from IPython.external import argparse -from IPython.zmq.parallel.client import Client, Reference +from IPython.parallel.client import Client, Reference def setup_partitioner(index, num_procs, gnum_cells, parts): """create a partitioner in the engine namespace""" diff --git a/docs/examples/newparallel/wave2D/parallelwave.py b/docs/examples/newparallel/wave2D/parallelwave.py index 3309267..2692d5e 100755 --- a/docs/examples/newparallel/wave2D/parallelwave.py +++ b/docs/examples/newparallel/wave2D/parallelwave.py @@ -28,7 +28,7 @@ import time from numpy import exp, zeros, newaxis, sqrt from IPython.external import argparse -from IPython.zmq.parallel.client import Client, Reference +from IPython.parallel.client import Client, Reference def setup_partitioner(comm, addrs, index, 
num_procs, gnum_cells, parts): """create a partitioner in the engine namespace""" diff --git a/docs/examples/newparallel/workflow/client.py b/docs/examples/newparallel/workflow/client.py index 5346b07..985e822 100644 --- a/docs/examples/newparallel/workflow/client.py +++ b/docs/examples/newparallel/workflow/client.py @@ -1,3 +1,3 @@ -from IPython.zmq.parallel.client import Client +from IPython.parallel.client import Client client = Client() diff --git a/docs/examples/newparallel/workflow/job_wrapper.py b/docs/examples/newparallel/workflow/job_wrapper.py index 933611d..216fe51 100755 --- a/docs/examples/newparallel/workflow/job_wrapper.py +++ b/docs/examples/newparallel/workflow/job_wrapper.py @@ -10,8 +10,8 @@ import sys argv = sys.argv -from IPython.zmq.parallel.engine import EngineFactory -from IPython.zmq.parallel.ipengineapp import launch_new_instance +from IPython.parallel.engine import EngineFactory +from IPython.parallel.ipengineapp import launch_new_instance ns = {} diff --git a/docs/examples/newparallel/workflow/wmanager.py b/docs/examples/newparallel/workflow/wmanager.py index 6565920..e4786b4 100644 --- a/docs/examples/newparallel/workflow/wmanager.py +++ b/docs/examples/newparallel/workflow/wmanager.py @@ -32,7 +32,7 @@ def cleanup(controller, engines): if __name__ == '__main__': # Start controller in separate process - cont = Popen(['python', '-m', 'IPython.zmq.parallel.ipcontrollerapp']) + cont = Popen(['python', '-m', 'IPython.parallel.ipcontrollerapp']) print('Started controller') # "Submit jobs" diff --git a/docs/source/parallelz/dag_dependencies.txt b/docs/source/parallelz/dag_dependencies.txt index 488a953..779e2e2 100644 --- a/docs/source/parallelz/dag_dependencies.txt +++ b/docs/source/parallelz/dag_dependencies.txt @@ -111,7 +111,7 @@ on which it depends: .. 
sourcecode:: ipython - In [5]: rc = client.Client() + In [5]: rc = Client() In [5]: view = rc.load_balanced_view() In [6]: results = {} diff --git a/docs/source/parallelz/parallel_demos.txt b/docs/source/parallelz/parallel_demos.txt index 940e640..e91823b 100644 --- a/docs/source/parallelz/parallel_demos.txt +++ b/docs/source/parallelz/parallel_demos.txt @@ -129,12 +129,12 @@ calculation can also be run by simply typing the commands from .. sourcecode:: ipython - In [1]: from IPython.zmq.parallel import client + In [1]: from IPython.parallel import Client # The Client allows us to use the engines interactively. # We simply pass Client the name of the cluster profile we # are using. - In [2]: c = client.Client(profile='mycluster') + In [2]: c = Client(profile='mycluster') In [3]: view = c.load_balanced_view() In [3]: c.ids diff --git a/docs/source/parallelz/parallel_details.txt b/docs/source/parallelz/parallel_details.txt index 50ae7b8..2c1ffe7 100644 --- a/docs/source/parallelz/parallel_details.txt +++ b/docs/source/parallelz/parallel_details.txt @@ -43,7 +43,7 @@ The following will fail: ... RemoteError: RuntimeError(array is not writeable) Traceback (most recent call last): - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 329, in apply_request + File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 329, in apply_request exec code in working, working File "", line 1, in File "", line 2, in setter @@ -178,7 +178,10 @@ args : tuple/list The positional arguments passed to `f` kwargs : dict The keyword arguments passed to `f` -block : bool (default: self.block) + +flags for all views: + +block : bool (default: view.block) Whether to wait for the result, or return immediately. 
False: returns AsyncResult @@ -186,29 +189,23 @@ block : bool (default: self.block) returns actual result(s) of f(*args, **kwargs) if multiple targets: list of results, matching `targets` -track : bool +track : bool [default view.track] whether to track non-copying sends. - [default False] -targets : int,list of ints, 'all', None +targets : int,list of ints, 'all', None [default view.targets] Specify the destination of the job. - if None: - Submit via Task queue for load-balancing. - if 'all': + if 'all' or None: Run on all active engines if list: Run on each specified engine if int: Run on single engine - Not eht -balanced : bool, default None - whether to load-balance. This will default to True - if targets is unspecified, or False if targets is specified. - - If `balanced` and `targets` are both specified, the task will - be assigne to *one* of the targets by the scheduler. - +Note that LoadBalancedView uses targets to restrict possible destinations. LoadBalanced calls +will always execute in just one location. + +flags only in LoadBalancedViews: + after : Dependency or collection of msg_ids Only for load-balanced execution (targets=None) Specify a list of msg_ids as a time-based dependency. @@ -243,25 +240,9 @@ does something very similar to ``execute(open(f).read())``. Views ===== -The principal extension of the :class:`~parallel.client.Client` is the +The principal extension of the :class:`~parallel.Client` is the :class:`~parallel.view.View` class. The client -Two of apply's keyword arguments are set at the construction of the View, and are immutable for -a given View: `balanced` and `targets`. `balanced` determines whether the View will be a -:class:`.LoadBalancedView` or a :class:`.DirectView`, and `targets` will be the View's `targets` -attribute. Attempts to change this will raise errors. - -Views are cached by targets/class, so requesting a view multiple times will always return the -*same object*, not create a new one: - -.. 
sourcecode:: ipython - - In [3]: v1 = rc.load_balanced_view([1,2,3]) - In [4]: v2 = rc.load_balanced_view([1,2,3]) - - In [5]: v2 is v1 - Out[5]: True - DirectView ---------- @@ -312,14 +293,12 @@ are always the same as: Out[3]: Also note that the slice is evaluated at the time of construction of the DirectView, so the -targets will not change over time if engines are added/removed from the cluster. Requesting -two views with the same slice at different times will *not* necessarily return the same View -if the number of engines has changed. +targets will not change over time if engines are added/removed from the cluster. Execution via DirectView ************************ -The DirectView is the simplest way to work with one or more engines directly (hence the name). +The DirectView is the simplest way to work with one or more engines directly (hence the name). Data movement via DirectView @@ -359,6 +338,15 @@ between engines, MPI should be used: In [60]: dview.gather('a') Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] +Push and pull +------------- + +push + +pull + + + LoadBalancedView @@ -370,10 +358,6 @@ The :class:`.LoadBalancedView` Data Movement ============= -push - -pull - Reference Results @@ -383,7 +367,7 @@ AsyncResults are the primary class get_result -results,metadata +results, metadata Querying the Hub ================ diff --git a/docs/source/parallelz/parallel_intro.txt b/docs/source/parallelz/parallel_intro.txt index de8d159..93707dc 100644 --- a/docs/source/parallelz/parallel_intro.txt +++ b/docs/source/parallelz/parallel_intro.txt @@ -57,7 +57,7 @@ The IPython architecture consists of four components: * The IPython schedulers. * The controller client. -These components live in the :mod:`IPython.zmq.parallel` package and are +These components live in the :mod:`IPython.parallel` package and are installed with IPython. They do, however, have additional dependencies that must be installed. 
For more information, see our :ref:`installation documentation `. @@ -127,7 +127,7 @@ a fully asynchronous interface to a set of engines. IPython client and views ------------------------ -There is one primary object, the :class:`~.parallel.client.Client`, for connecting to a cluster. +There is one primary object, the :class:`~.parallel.Client`, for connecting to a cluster. For each execution model, there is a corresponding :class:`~.parallel.view.View`. These views allow users to interact with a set of engines through the interface. Here are the two default views: @@ -206,9 +206,9 @@ everything is working correctly, try the following commands: .. sourcecode:: ipython - In [1]: from IPython.zmq.parallel import client + In [1]: from IPython.parallel import Client - In [2]: c = client.Client() + In [2]: c = Client() In [4]: c.ids Out[4]: set([0, 1, 2, 3]) @@ -224,7 +224,7 @@ name, create the client like this: .. sourcecode:: ipython - In [2]: c = client.Client('/path/to/my/ipcontroller-client.json') + In [2]: c = Client('/path/to/my/ipcontroller-client.json') Remember, a client needs to be able to see the Hub's ports to connect. So if they are on a different machine, you may need to use an ssh server to tunnel access to that machine, @@ -232,7 +232,7 @@ then you would connect to it with: .. sourcecode:: ipython - In [2]: c = client.Client(sshserver='myhub.example.com') + In [2]: c = Client(sshserver='myhub.example.com') Where 'myhub.example.com' is the url or IP address of the machine on which the Hub process is running (or another machine that has direct access to the Hub's ports). diff --git a/docs/source/parallelz/parallel_mpi.txt b/docs/source/parallelz/parallel_mpi.txt index c6d2445..fe5cbf9 100644 --- a/docs/source/parallelz/parallel_mpi.txt +++ b/docs/source/parallelz/parallel_mpi.txt @@ -123,11 +123,11 @@ using our :func:`psum` function: .. 
sourcecode:: ipython - In [1]: from IPython.zmq.parallel import client + In [1]: from IPython.parallel import Client In [2]: %load_ext parallel_magic - In [3]: c = client.Client(profile='mpi') + In [3]: c = Client(profile='mpi') In [4]: view = c[:] diff --git a/docs/source/parallelz/parallel_multiengine.txt b/docs/source/parallelz/parallel_multiengine.txt index 3f445b8..afdf2ae 100644 --- a/docs/source/parallelz/parallel_multiengine.txt +++ b/docs/source/parallelz/parallel_multiengine.txt @@ -27,14 +27,14 @@ our :ref:`introduction ` to using IPython for parallel computing. Creating a ``Client`` instance ============================== -The first step is to import the IPython :mod:`IPython.zmq.parallel.client` +The first step is to import the IPython :mod:`IPython.parallel` module and then create a :class:`.Client` instance: .. sourcecode:: ipython - In [1]: from IPython.zmq.parallel import client + In [1]: from IPython.parallel import Client - In [2]: rc = client.Client() + In [2]: rc = Client() This form assumes that the default connection information (stored in :file:`ipcontroller-client.json` found in :file:`IPYTHON_DIR/clusterz_default/security`) is @@ -44,9 +44,9 @@ file to the client machine, or enter its contents as arguments to the Client con .. 
sourcecode:: ipython # If you have copied the json connector file from the controller: - In [2]: rc = client.Client('/path/to/ipcontroller-client.json') + In [2]: rc = Client('/path/to/ipcontroller-client.json') # or to connect with a specific profile you have set up: - In [3]: rc = client.Client(profile='mpi') + In [3]: rc = Client(profile='mpi') To make sure there are engines connected to the controller, users can get a list @@ -286,7 +286,7 @@ local Python/IPython session: /home/you/ in () ----> 1 ar.get(1) - /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout) + /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout) 62 raise self._exception 63 else: ---> 64 raise error.TimeoutError("Result not ready.") @@ -320,17 +320,17 @@ and blocks until all of the associated results are ready: # Then, their results are ready using get() or the `.r` attribute In [75]: pr_list[0].get() Out[75]: [2.9982571601867676, 2.9982588291168213, 2.9987530708312988, 2.9990990161895752] - -The ``block`` attribute ------------------------ -Many View methods(excluding :meth:`apply`) accept -``block`` as a keyword argument. As we have seen above, these -keyword arguments control the blocking mode. The :class:`View` class also has -a :attr:`block` attribute that controls the default behavior when the keyword -argument is not provided. Thus the following logic is used for :attr:`block`: +The ``block`` and ``targets`` keyword arguments and attributes +-------------------------------------------------------------- + +Most DirectView methods (excluding :meth:`apply` and :meth:`map`) accept ``block`` and +``targets`` as keyword arguments. As we have seen above, these keyword arguments control the +blocking mode and which engines the command is applied to. The :class:`View` class also has +:attr:`block` and :attr:`targets` attributes that control the default behavior when the keyword +arguments are not provided. 
Thus the following logic is used for :attr:`block` and :attr:`targets`: * If no keyword argument is provided, the instance attributes are used. * Keyword argument, if provided override the instance attributes for @@ -340,16 +340,19 @@ The following examples demonstrate how to use the instance attributes: .. sourcecode:: ipython + In [16]: dview.targets = [0,2] + In [17]: dview.block = False In [18]: ar = dview.apply(lambda : 10) In [19]: ar.get() - Out[19]: [10, 10, 10, 10] + Out[19]: [10, 10] + In [16]: dview.targets = v.client.ids # all engines (4) + In [21]: dview.block = True - # Note targets='all' means all engines In [22]: dview.apply(lambda : 42) Out[22]: [42, 42, 42, 42] @@ -428,7 +431,7 @@ on the engines given by the :attr:`targets` attribute: Type %autopx to disable In [32]: max_evals = [] - + In [33]: for i in range(100): ....: a = numpy.random.rand(10,10) @@ -437,7 +440,7 @@ on the engines given by the :attr:`targets` attribute: ....: max_evals.append(evals[0].real) ....: ....: - + In [34]: %autopx Auto Parallel Disabled @@ -576,7 +579,7 @@ more other types of exceptions. Here is how it works: /home/you/ in () ----> 1 dview.execute('1/0', block=True) - /path/to/site-packages/IPython/zmq/parallel/view.py in execute(self, code, block) + /path/to/site-packages/IPython/parallel/view.py in execute(self, code, block) 460 default: self.block 461 """ --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block) @@ -585,7 +588,7 @@ more other types of exceptions. Here is how it works: /home/you/ in apply_with_flags(self, f, args, kwargs, block, track) - /path/to/site-packages/IPython/zmq/parallel/view.py in sync_results(f, self, *args, **kwargs) + /path/to/site-packages/IPython/parallel/view.py in sync_results(f, self, *args, **kwargs) 46 def sync_results(f, self, *args, **kwargs): 47 """sync relevant results from self.client to our results attribute.""" ---> 48 ret = f(self, *args, **kwargs) @@ -594,21 +597,21 @@ more other types of exceptions. 
Here is how it works: /home/you/ in apply_with_flags(self, f, args, kwargs, block, track) - /path/to/site-packages/IPython/zmq/parallel/view.py in save_ids(f, self, *args, **kwargs) + /path/to/site-packages/IPython/parallel/view.py in save_ids(f, self, *args, **kwargs) 35 n_previous = len(self.client.history) 36 try: ---> 37 ret = f(self, *args, **kwargs) 38 finally: 39 nmsgs = len(self.client.history) - n_previous - /path/to/site-packages/IPython/zmq/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track) + /path/to/site-packages/IPython/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track) 398 if block: 399 try: --> 400 return ar.get() 401 except KeyboardInterrupt: 402 pass - /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout) + /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout) 87 return self._result 88 else: ---> 89 raise self._exception @@ -660,7 +663,7 @@ instance: /home/you/ in () ----> 1 dview.execute('1/0', block=True) - /path/to/site-packages/IPython/zmq/parallel/view.py in execute(self, code, block) + /path/to/site-packages/IPython/parallel/view.py in execute(self, code, block) 460 default: self.block 461 """ --> 462 return self.apply_with_flags(util._execute, args=(code,), block=block) @@ -669,7 +672,7 @@ instance: /home/you/ in apply_with_flags(self, f, args, kwargs, block, track) - /path/to/site-packages/IPython/zmq/parallel/view.py in sync_results(f, self, *args, **kwargs) + /path/to/site-packages/IPython/parallel/view.py in sync_results(f, self, *args, **kwargs) 46 def sync_results(f, self, *args, **kwargs): 47 """sync relevant results from self.client to our results attribute.""" ---> 48 ret = f(self, *args, **kwargs) @@ -678,21 +681,21 @@ instance: /home/you/ in apply_with_flags(self, f, args, kwargs, block, track) - /path/to/site-packages/IPython/zmq/parallel/view.py in save_ids(f, self, *args, **kwargs) + /path/to/site-packages/IPython/parallel/view.py 
in save_ids(f, self, *args, **kwargs) 35 n_previous = len(self.client.history) 36 try: ---> 37 ret = f(self, *args, **kwargs) 38 finally: 39 nmsgs = len(self.client.history) - n_previous - /path/to/site-packages/IPython/zmq/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track) + /path/to/site-packages/IPython/parallel/view.py in apply_with_flags(self, f, args, kwargs, block, track) 398 if block: 399 try: --> 400 return ar.get() 401 except KeyboardInterrupt: 402 pass - /path/to/site-packages/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout) + /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout) 87 return self._result 88 else: ---> 89 raise self._exception @@ -706,7 +709,7 @@ instance: [3:apply]: ZeroDivisionError: integer division or modulo by zero In [82]: %debug - > /Users/minrk/dev/ip/mine/IPython/zmq/parallel/asyncresult.py(80)get() + > /path/to/site-packages/IPython/parallel/asyncresult.py(80)get() 79 else: ---> 80 raise self._exception 81 else: @@ -723,10 +726,10 @@ instance: ipdb> e.print_tracebacks() [0:apply]: Traceback (most recent call last): - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request + File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request exec code in working, working File "", line 1, in - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute + File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute exec code in globals() File "", line 1, in ZeroDivisionError: integer division or modulo by zero @@ -734,10 +737,10 @@ instance: [1:apply]: Traceback (most recent call last): - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request + File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request exec code in working, working File "", line 1, in - File 
"/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute + File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute exec code in globals() File "", line 1, in ZeroDivisionError: integer division or modulo by zero @@ -745,10 +748,10 @@ instance: [2:apply]: Traceback (most recent call last): - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request + File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request exec code in working, working File "", line 1, in - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute + File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute exec code in globals() File "", line 1, in ZeroDivisionError: integer division or modulo by zero @@ -756,10 +759,10 @@ instance: [3:apply]: Traceback (most recent call last): - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/streamkernel.py", line 332, in apply_request + File "/path/to/site-packages/IPython/parallel/streamkernel.py", line 332, in apply_request exec code in working, working File "", line 1, in - File "/Users/minrk/dev/ip/mine/IPython/zmq/parallel/client.py", line 69, in _execute + File "/path/to/site-packages/IPython/parallel/client.py", line 69, in _execute exec code in globals() File "", line 1, in ZeroDivisionError: integer division or modulo by zero @@ -784,7 +787,7 @@ All of this same error handling magic even works in non-blocking mode: /Users/minrk/ in () ----> 1 ar.get() - /Users/minrk/dev/ip/mine/IPython/zmq/parallel/asyncresult.pyc in get(self, timeout) + /path/to/site-packages/IPython/parallel/asyncresult.pyc in get(self, timeout) 78 return self._result 79 else: ---> 80 raise self._exception diff --git a/docs/source/parallelz/parallel_process.txt b/docs/source/parallelz/parallel_process.txt index f4e1466..b23895b 100644 --- a/docs/source/parallelz/parallel_process.txt +++ 
b/docs/source/parallelz/parallel_process.txt @@ -140,7 +140,7 @@ There, instruct ipclusterz to use the MPIExec launchers by adding the lines: .. sourcecode:: python - c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.MPIExecEngineSetLauncher' + c.Global.engine_launcher = 'IPython.parallel.launcher.MPIExecEngineSetLauncher' If the default MPI configuration is correct, then you can now start your cluster, with:: @@ -155,7 +155,7 @@ If you have a reason to also start the Controller with mpi, you can specify: .. sourcecode:: python - c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.MPIExecControllerLauncher' + c.Global.controller_launcher = 'IPython.parallel.launcher.MPIExecControllerLauncher' .. note:: @@ -196,8 +196,8 @@ and engines: .. sourcecode:: python - c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLauncher' - c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.PBSEngineSetLauncher' + c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher' + c.Global.engine_launcher = 'IPython.parallel.launcher.PBSEngineSetLauncher' To use this mode, you first need to create a PBS script template that will be used to start the engines. Here is a sample PBS script template: @@ -309,9 +309,9 @@ To use this mode, select the SSH launchers in :file:`ipclusterz_config.py`: .. 
sourcecode:: python - c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.SSHEngineSetLauncher' + c.Global.engine_launcher = 'IPython.parallel.launcher.SSHEngineSetLauncher' # and if the Controller is also to be remote: - c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.SSHControllerLauncher' + c.Global.controller_launcher = 'IPython.parallel.launcher.SSHControllerLauncher' The controller's remote location and configuration can be specified: diff --git a/docs/source/parallelz/parallel_task.txt b/docs/source/parallelz/parallel_task.txt index b73056a..9ba010d 100644 --- a/docs/source/parallelz/parallel_task.txt +++ b/docs/source/parallelz/parallel_task.txt @@ -32,15 +32,15 @@ our :ref:`introduction ` to using IPython for parallel computing. Creating a ``Client`` instance ============================== -The first step is to import the IPython :mod:`IPython.zmq.parallel.client` +The first step is to import the IPython :mod:`IPython.parallel.client` module and then create a :class:`.Client` instance, and we will also be using a :class:`LoadBalancedView`, here called `lview`: .. sourcecode:: ipython - In [1]: from IPython.zmq.parallel import client + In [1]: from IPython.parallel import Client - In [2]: rc = client.Client() + In [2]: rc = Client() This form assumes that the controller was started on localhost with default @@ -50,9 +50,9 @@ argument to the constructor: .. 
sourcecode:: ipython # for a visible LAN controller listening on an external port: - In [2]: rc = client.Client('tcp://192.168.1.16:10101') + In [2]: rc = Client('tcp://192.168.1.16:10101') # or to connect with a specific profile you have set up: - In [3]: rc = client.Client(profile='mpi') + In [3]: rc = Client(profile='mpi') For load-balanced execution, we will make use of a :class:`LoadBalancedView` object, which can be constructed via the client's :meth:`load_balanced_view` method: @@ -132,7 +132,7 @@ Functional Dependencies Functional dependencies are used to determine whether a given engine is capable of running a particular task. This is implemented via a special :class:`Exception` class, -:class:`UnmetDependency`, found in `IPython.zmq.parallel.error`. Its use is very simple: +:class:`UnmetDependency`, found in `IPython.parallel.error`. Its use is very simple: if a task fails with an UnmetDependency exception, then the scheduler, instead of relaying the error up to the client like any other error, catches the error, and submits the task to a different engine. This will repeat indefinitely, and a task will never be submitted @@ -145,7 +145,7 @@ There are two decorators and a class used for functional dependencies: .. sourcecode:: ipython - In [9]: from IPython.zmq.parallel.dependency import depend, require, dependent + In [9]: from IPython.parallel.dependency import depend, require, dependent @require ******** @@ -399,10 +399,10 @@ The :class:`LoadBalancedView` has many more powerful features that allow quite a of flexibility in how tasks are defined and run. 
The next places to look are in the following classes: -* :class:`IPython.zmq.parallel.view.LoadBalancedView` -* :class:`IPython.zmq.parallel.client.AsyncResult` -* :meth:`IPython.zmq.parallel.view.LoadBalancedView.apply` -* :mod:`IPython.zmq.parallel.dependency` +* :class:`IPython.parallel.view.LoadBalancedView` +* :class:`IPython.parallel.asyncresult.AsyncResult` +* :meth:`IPython.parallel.view.LoadBalancedView.apply` +* :mod:`IPython.parallel.dependency` The following is an overview of how to use these classes together: diff --git a/docs/source/parallelz/parallel_winhpc.txt b/docs/source/parallelz/parallel_winhpc.txt index eedd8c6..291dee9 100644 --- a/docs/source/parallelz/parallel_winhpc.txt +++ b/docs/source/parallelz/parallel_winhpc.txt @@ -236,9 +236,9 @@ will need to edit the following attributes in the file # Set these at the top of the file to tell ipclusterz to use the # Windows HPC job scheduler. c.Global.controller_launcher = \ - 'IPython.zmq.parallel.launcher.WindowsHPCControllerLauncher' + 'IPython.parallel.launcher.WindowsHPCControllerLauncher' c.Global.engine_launcher = \ - 'IPython.zmq.parallel.launcher.WindowsHPCEngineSetLauncher' + 'IPython.parallel.launcher.WindowsHPCEngineSetLauncher' # Set these to the host name of the scheduler (head node) of your cluster. c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE' @@ -301,7 +301,7 @@ apply it to each element of an array of integers in parallel using the .. 
sourcecode:: ipython - In [1]: from IPython.zmq.parallel.client import * + In [1]: from IPython.parallel import * In [2]: c = MultiEngineClient(profile='mycluster') diff --git a/setup.py b/setup.py index 2450384..13ec6aa 100755 --- a/setup.py +++ b/setup.py @@ -215,10 +215,10 @@ if 'setuptools' in sys.modules: 'ipython = IPython.frontend.terminal.ipapp:launch_new_instance', 'ipython-qtconsole = IPython.frontend.qt.console.ipythonqt:main', 'pycolor = IPython.utils.PyColorize:main', - 'ipcontrollerz = IPython.zmq.parallel.ipcontrollerapp:launch_new_instance', - 'ipenginez = IPython.zmq.parallel.ipengineapp:launch_new_instance', - 'iploggerz = IPython.zmq.parallel.iploggerapp:launch_new_instance', - 'ipclusterz = IPython.zmq.parallel.ipclusterapp:launch_new_instance', + 'ipcontrollerz = IPython.parallel.ipcontrollerapp:launch_new_instance', + 'ipenginez = IPython.parallel.ipengineapp:launch_new_instance', + 'iploggerz = IPython.parallel.iploggerapp:launch_new_instance', + 'ipclusterz = IPython.parallel.ipclusterapp:launch_new_instance', 'iptest = IPython.testing.iptest:main', 'irunner = IPython.lib.irunner:main' ] diff --git a/setupbase.py b/setupbase.py index c05e6e0..8a2289f 100644 --- a/setupbase.py +++ b/setupbase.py @@ -132,8 +132,8 @@ def find_packages(): add_package(packages, 'testing.plugin', tests=False) add_package(packages, 'utils', tests=True) add_package(packages, 'zmq') - add_package(packages, 'zmq.parallel') add_package(packages, 'zmq.pylab') + add_package(packages, 'parallel') return packages #--------------------------------------------------------------------------- @@ -261,12 +261,13 @@ def find_scripts(): """ Find IPython's scripts. 
""" - zmq_scripts = pjoin('IPython','zmq','parallel','scripts') + parallel_scripts = pjoin('IPython','parallel','scripts') main_scripts = pjoin('IPython','scripts') - scripts = [pjoin(zmq_scripts, 'ipenginez'), - pjoin(zmq_scripts, 'ipcontrollerz'), - pjoin(zmq_scripts, 'ipclusterz'), - pjoin(zmq_scripts, 'iploggerz'), + scripts = [ + pjoin(parallel_scripts, 'ipenginez'), + pjoin(parallel_scripts, 'ipcontrollerz'), + pjoin(parallel_scripts, 'ipclusterz'), + pjoin(parallel_scripts, 'iploggerz'), pjoin(main_scripts, 'ipython'), pjoin(main_scripts, 'ipython-qtconsole'), pjoin(main_scripts, 'pycolor'),