Fixed bug in ipengine.py when mpi was enabled that was causing the engine to crash at start....
Brian E Granger
@@ -1,95 +1,99 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Release data for the IPython project.
3 3
4 4 $Id: Release.py 3002 2008-02-01 07:17:00Z fperez $"""
5 5
6 6 #*****************************************************************************
7 7 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
8 8 #
9 9 # Copyright (c) 2001 Janko Hauser <jhauser@zscout.de> and Nathaniel Gray
10 10 # <n8gray@caltech.edu>
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #*****************************************************************************
15 15
16 16 # Name of the package for release purposes. This is the name which labels
17 17 # the tarballs and RPMs made by distutils, so it's best to lowercase it.
18 18 name = 'ipython'
19 19
20 20 # For versions with substrings (like 0.6.16.svn), use an extra . to separate
21 21 # the new substring. We have to avoid using either dashes or underscores,
22 22 # because bdist_rpm does not accept dashes (an RPM convention), and
23 23 # bdist_deb does not accept underscores (a Debian convention).
24 24
25 revision = '1016'
25 development = True # change this to False to do a release
26 version_base = '0.9.0'
26 27 branch = 'ipython'
28 revision = '1016'
27 29
28 if branch == 'ipython':
29 version = '0.9.0.bzr.r' + revision
30 if development:
31 if branch == 'ipython':
32 version = '%s.bzr.r%s' % (version_base, revision)
33 else:
34 version = '%s.bzr.r%s.%s' % (version_base, revision, branch)
30 35 else:
31 version = '0.9.0.bzr.r%s.%s' % (revision,branch)
36 version = version_base
32 37
33 # version = '0.8.4'
34 38
35 39 description = "Tools for interactive development in Python."
36 40
37 41 long_description = \
38 42 """
39 43 IPython provides a replacement for the interactive Python interpreter with
40 44 extra functionality.
41 45
42 46 Main features:
43 47
44 48 * Comprehensive object introspection.
45 49
46 50 * Input history, persistent across sessions.
47 51
48 52 * Caching of output results during a session with automatically generated
49 53 references.
50 54
51 55 * Readline based name completion.
52 56
53 57 * Extensible system of 'magic' commands for controlling the environment and
54 58 performing many tasks related either to IPython or the operating system.
55 59
56 60 * Configuration system with easy switching between different setups (simpler
57 61 than changing $PYTHONSTARTUP environment variables every time).
58 62
59 63 * Session logging and reloading.
60 64
61 65 * Extensible syntax processing for special purpose situations.
62 66
63 67 * Access to the system shell with user-extensible alias system.
64 68
65 69 * Easily embeddable in other Python programs.
66 70
67 71 * Integrated access to the pdb debugger and the Python profiler.
68 72
69 73 The latest development version is always available at the IPython subversion
70 74 repository_.
71 75
72 76 .. _repository: http://ipython.scipy.org/svn/ipython/ipython/trunk#egg=ipython-dev
73 77 """
74 78
75 79 license = 'BSD'
76 80
77 81 authors = {'Fernando' : ('Fernando Perez','fperez@colorado.edu'),
78 82 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
79 83 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
80 84 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
81 85 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
82 86 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com')
83 87 }
84 88
85 89 author = 'The IPython Development Team'
86 90
87 91 author_email = 'ipython-dev@scipy.org'
88 92
89 93 url = 'http://ipython.scipy.org'
90 94
91 95 download_url = 'http://ipython.scipy.org/dist'
92 96
93 97 platforms = ['Linux','Mac OSX','Windows XP/2000/NT','Windows 95/98/ME']
94 98
95 99 keywords = ['Interactive','Interpreter','Shell','Parallel','Distributed']
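As a quick illustration of the new development/version_base logic added above, here is a small standalone sketch (not part of Release.py) showing the version strings that the values in this diff would produce:

    # Sketch only: reproduces the version-string logic from the diff above,
    # using the values it shows (version_base, branch, revision).
    version_base = '0.9.0'
    branch = 'ipython'
    revision = '1016'

    for development in (True, False):
        if development:
            if branch == 'ipython':
                version = '%s.bzr.r%s' % (version_base, revision)
            else:
                version = '%s.bzr.r%s.%s' % (version_base, revision, branch)
        else:
            version = version_base
        print development, '->', version
    # Prints:
    #   True -> 0.9.0.bzr.r1016
    #   False -> 0.9.0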
@@ -1,323 +1,323 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """Start an IPython cluster conveniently, either locally or remotely.
5 5
6 6 Basic usage
7 7 -----------
8 8
9 9 For local operation, the simplest mode of usage is:
10 10
11 11 %prog -n N
12 12
13 13 where N is the number of engines you want started.
14 14
15 15 For remote operation, you must call it with a cluster description file:
16 16
17 17 %prog -f clusterfile.py
18 18
19 19 The cluster file is a normal Python script which gets run via execfile(). You
20 20 can have arbitrary logic in it, but all that matters is that at the end of the
21 21 execution, it declares the variables 'controller', 'engines', and optionally
22 22 'sshx'. See the accompanying examples for details on what these variables must
23 23 contain.
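For illustration only, a minimal cluster description file consistent with the clusterRemote code later in this file might look like the sketch below; the host names and the shape of the 'engines' mapping are assumptions, not a canonical template (see the accompanying examples shipped with IPython):

    # clusterfile.py -- hypothetical example; ipcluster runs it via execfile()
    controller = dict(host='controller.example.com')

    # Map each engine host to the number of engines to start there.
    engines = {'node1.example.com': 4,
               'node2.example.com': 4}

    # Optional: full path to the sshx-style startup script on the remote hosts.
    sshx = '/usr/local/bin/sshx'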
24 24
25 25
26 26 Notes
27 27 -----
28 28
29 29 WARNING: this code is still UNFINISHED and EXPERIMENTAL! It is incomplete,
30 30 some listed options are not really implemented, and all of its interfaces are
31 31 subject to change.
32 32
33 33 When operating over SSH for a remote cluster, this program relies on the
34 34 existence of a particular script called 'sshx'. This script must live in the
35 35 target systems where you'll be running your controller and engines, and is
36 36 needed to configure your PATH and PYTHONPATH variables for further execution of
37 37 python code at the other end of an SSH connection. The script can be as simple
38 38 as:
39 39
40 40 #!/bin/sh
41 41 . $HOME/.bashrc
42 42 "$@"
43 43
44 44 which is the default one provided by IPython. You can modify this or provide
45 45 your own. Since it's quite likely that for different clusters you may need
46 46 this script to configure things differently or that it may live in different
47 47 locations, its full path can be set in the same file where you define the
48 48 cluster setup. IPython's order of evaluation for this variable is the
49 49 following:
50 50
51 51 a) Internal default: 'sshx'. This only works if it is in the default system
52 52 path which SSH sets up in non-interactive mode.
53 53
54 54 b) Environment variable: if $IPYTHON_SSHX is defined, this overrides the
55 55 internal default.
56 56
57 57 c) Variable 'sshx' in the cluster configuration file: finally, this will
58 58 override the previous two values.
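In code, that precedence amounts to a single lookup; a minimal sketch (the clusterRemote function later in this file does essentially the same thing):

    import os

    cluster_config = {}  # namespace populated by execfile() on the cluster file
    # c) cluster-file value beats b) $IPYTHON_SSHX beats a) the built-in 'sshx'
    sshx = cluster_config.get('sshx', os.environ.get('IPYTHON_SSHX', 'sshx'))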
59 59
60 60 This code is Unix-only, with precious little hope of any of this ever working
61 61 under Windows, since we need SSH from the ground up, we background processes,
62 62 etc. Ports of this functionality to Windows are welcome.
63 63
64 64
65 65 Call summary
66 66 ------------
67 67
68 68 %prog [options]
69 69 """
70 70
71 71 __docformat__ = "restructuredtext en"
72 72
73 73 #-------------------------------------------------------------------------------
74 74 # Copyright (C) 2008 The IPython Development Team
75 75 #
76 76 # Distributed under the terms of the BSD License. The full license is in
77 77 # the file COPYING, distributed as part of this software.
78 78 #-------------------------------------------------------------------------------
79 79
80 80 #-------------------------------------------------------------------------------
81 81 # Stdlib imports
82 82 #-------------------------------------------------------------------------------
83 83
84 84 import os
85 85 import signal
86 86 import sys
87 87 import time
88 88
89 89 from optparse import OptionParser
90 90 from subprocess import Popen,call
91 91
92 92 #---------------------------------------------------------------------------
93 93 # IPython imports
94 94 #---------------------------------------------------------------------------
95 95 from IPython.tools import utils
96 96 from IPython.config import cutils
97 97
98 98 #---------------------------------------------------------------------------
99 99 # Normal code begins
100 100 #---------------------------------------------------------------------------
101 101
102 102 def parse_args():
103 103 """Parse command line and return opts,args."""
104 104
105 105 parser = OptionParser(usage=__doc__)
106 106 newopt = parser.add_option # shorthand
107 107
108 108 newopt("--controller-port", type="int", dest="controllerport",
109 109 help="the TCP port the controller is listening on")
110 110
111 111 newopt("--controller-ip", type="string", dest="controllerip",
112 112 help="the TCP ip address of the controller")
113 113
114 114 newopt("-n", "--num", type="int", dest="n",default=2,
115 115 help="the number of engines to start")
116 116
117 117 newopt("--engine-port", type="int", dest="engineport",
118 118 help="the TCP port the controller will listen on for engine "
119 119 "connections")
120 120
121 121 newopt("--engine-ip", type="string", dest="engineip",
122 122 help="the TCP ip address the controller will listen on "
123 123 "for engine connections")
124 124
125 125 newopt("--mpi", type="string", dest="mpi",
126 126 help="use mpi with package: for instance --mpi=mpi4py")
127 127
128 128 newopt("-l", "--logfile", type="string", dest="logfile",
129 129 help="log file name")
130 130
131 131 newopt('-f','--cluster-file',dest='clusterfile',
132 132 help='file describing a remote cluster')
133 133
134 134 return parser.parse_args()
135 135
136 136 def numAlive(controller,engines):
137 137 """Return the number of processes still alive."""
138 138 retcodes = [controller.poll()] + \
139 139 [e.poll() for e in engines]
140 140 return retcodes.count(None)
141 141
142 142 stop = lambda pid: os.kill(pid,signal.SIGINT)
143 143 kill = lambda pid: os.kill(pid,signal.SIGTERM)
144 144
145 145 def cleanup(clean,controller,engines):
146 146 """Stop the controller and engines with the given cleanup method."""
147 147
148 148 for e in engines:
149 149 if e.poll() is None:
150 150 print 'Stopping engine, pid',e.pid
151 151 clean(e.pid)
152 152 if controller.poll() is None:
153 153 print 'Stopping controller, pid',controller.pid
154 154 clean(controller.pid)
155 155
156 156
157 157 def ensureDir(path):
158 158 """Ensure a directory exists or raise an exception."""
159 159 if not os.path.isdir(path):
160 160 os.makedirs(path)
161 161
162 162
163 163 def startMsg(control_host,control_port=10105):
164 164 """Print a startup message"""
165 165 print
166 166 print 'Your cluster is up and running.'
167 167 print
168 168 print 'For interactive use, you can make a MultiEngineClient with:'
169 169 print
170 170 print 'from IPython.kernel import client'
171 print "mec = client.MultiEngineClient((%r,%s))" % \
172 (control_host,control_port)
171 print "mec = client.MultiEngineClient()"
173 172 print
174 173 print 'You can then cleanly stop the cluster from IPython using:'
175 174 print
176 175 print 'mec.kill(controller=True)'
177 176 print
178 177
179 178
180 179 def clusterLocal(opt,arg):
181 180 """Start a cluster on the local machine."""
182 181
183 182 # Store all logs inside the ipython directory
184 183 ipdir = cutils.get_ipython_dir()
185 184 pjoin = os.path.join
186 185
187 186 logfile = opt.logfile
188 187 if logfile is None:
189 188 logdir_base = pjoin(ipdir,'log')
190 189 ensureDir(logdir_base)
191 190 logfile = pjoin(logdir_base,'ipcluster-')
192 191
193 192 print 'Starting controller:',
194 193 controller = Popen(['ipcontroller','--logfile',logfile])
195 194 print 'Controller PID:',controller.pid
196 195
197 196 print 'Starting engines: ',
198 time.sleep(3)
197 time.sleep(5)
199 198
200 199 englogfile = '%s%s-' % (logfile,controller.pid)
201 200 mpi = opt.mpi
202 201 if mpi: # start with mpi - killing the engines with sigterm will not work if you do this
203 202 engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi', mpi, '--logfile',englogfile])]
203 # engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi', mpi])]
204 204 else: # do what we would normally do
205 205 engines = [ Popen(['ipengine','--logfile',englogfile])
206 206 for i in range(opt.n) ]
207 207 eids = [e.pid for e in engines]
208 208 print 'Engines PIDs: ',eids
209 209 print 'Log files: %s*' % englogfile
210 210
211 211 proc_ids = eids + [controller.pid]
212 212 procs = engines + [controller]
213 213
214 214 grpid = os.getpgrp()
215 215 try:
216 216 startMsg('127.0.0.1')
217 217 print 'You can also hit Ctrl-C to stop it, or use from the cmd line:'
218 218 print
219 219 print 'kill -INT',grpid
220 220 print
221 221 try:
222 222 while True:
223 223 time.sleep(5)
224 224 except:
225 225 pass
226 226 finally:
227 227 print 'Stopping cluster. Cleaning up...'
228 228 cleanup(stop,controller,engines)
229 229 for i in range(4):
230 230 time.sleep(i+2)
231 231 nZombies = numAlive(controller,engines)
232 232 if nZombies== 0:
233 233 print 'OK: All processes cleaned up.'
234 234 break
235 235 print 'Trying again, %d processes did not stop...' % nZombies
236 236 cleanup(kill,controller,engines)
237 237 if numAlive(controller,engines) == 0:
238 238 print 'OK: All processes cleaned up.'
239 239 break
240 240 else:
241 241 print '*'*75
242 242 print 'ERROR: could not kill some processes, try to do it',
243 243 print 'manually.'
244 244 zombies = []
245 245 if controller.returncode is None:
246 246 print 'Controller is alive: pid =',controller.pid
247 247 zombies.append(controller.pid)
248 248 liveEngines = [ e for e in engines if e.returncode is None ]
249 249 for e in liveEngines:
250 250 print 'Engine is alive: pid =',e.pid
251 251 zombies.append(e.pid)
252 252 print
253 253 print 'Zombie summary:',' '.join(map(str,zombies))
254 254
255 255 def clusterRemote(opt,arg):
256 256 """Start a remote cluster over SSH"""
257 257
258 258 # Load the remote cluster configuration
259 259 clConfig = {}
260 260 execfile(opt.clusterfile,clConfig)
261 261 contConfig = clConfig['controller']
262 262 engConfig = clConfig['engines']
263 263 # Determine where to find sshx:
264 264 sshx = clConfig.get('sshx',os.environ.get('IPYTHON_SSHX','sshx'))
265 265
266 266 # Store all logs inside the ipython directory
267 267 ipdir = cutils.get_ipython_dir()
268 268 pjoin = os.path.join
269 269
270 270 logfile = opt.logfile
271 271 if logfile is None:
272 272 logdir_base = pjoin(ipdir,'log')
273 273 ensureDir(logdir_base)
274 274 logfile = pjoin(logdir_base,'ipcluster')
275 275
276 276 # Append this script's PID to the logfile name always
277 277 logfile = '%s-%s' % (logfile,os.getpid())
278 278
279 279 print 'Starting controller:'
280 280 # Controller data:
281 281 xsys = os.system
282 282
283 283 contHost = contConfig['host']
284 284 contLog = '%s-con-%s-' % (logfile,contHost)
285 285 cmd = "ssh %s '%s' 'ipcontroller --logfile %s' &" % \
286 286 (contHost,sshx,contLog)
287 287 #print 'cmd:<%s>' % cmd # dbg
288 288 xsys(cmd)
289 289 time.sleep(2)
290 290
291 291 print 'Starting engines: '
292 292 for engineHost,engineData in engConfig.iteritems():
293 293 if isinstance(engineData,int):
294 294 numEngines = engineData
295 295 else:
296 296 raise NotImplementedError('port configuration not finished for engines')
297 297
298 298 print 'Starting %d engines on %s' % (numEngines,engineHost)
299 299 engLog = '%s-eng-%s-' % (logfile,engineHost)
300 300 for i in range(numEngines):
301 301 cmd = "ssh %s '%s' 'ipengine --controller-ip %s --logfile %s' &" % \
302 302 (engineHost,sshx,contHost,engLog)
303 303 #print 'cmd:<%s>' % cmd # dbg
304 304 xsys(cmd)
305 305 # Wait after each host a little bit
306 306 time.sleep(1)
307 307
308 308 startMsg(contConfig['host'])
309 309
310 310 def main():
311 311 """Main driver for the two big options: local or remote cluster."""
312 312
313 313 opt,arg = parse_args()
314 314
315 315 clusterfile = opt.clusterfile
316 316 if clusterfile:
317 317 clusterRemote(opt,arg)
318 318 else:
319 319 clusterLocal(opt,arg)
320 320
321 321
322 322 if __name__=='__main__':
323 323 main()
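Since the startup message above now builds the client with no arguments, here is a minimal usage sketch, assuming the controller's FURL file has been written to the default IPYTHONDIR location so it is found automatically (the kill call is taken from the message itself; the rest is illustrative):

    from IPython.kernel import client

    # No (ip, port) pair needed any more; the FURL file is located automatically.
    mec = client.MultiEngineClient()

    # ... do some parallel work ...

    # Cleanly shut the whole cluster down, as the startup message suggests.
    mec.kill(controller=True)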
@@ -1,169 +1,171 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """Start the IPython Engine."""
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 # Python looks for an empty string at the beginning of sys.path to enable
20 20 # importing from the cwd.
21 21 import sys
22 22 sys.path.insert(0, '')
23 23
24 24 import sys, os
25 25 from optparse import OptionParser
26 26
27 27 from twisted.application import service
28 28 from twisted.internet import reactor
29 29 from twisted.python import log
30 30
31 31 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
32 32
33 33 from IPython.kernel.core.config import config_manager as core_config_manager
34 34 from IPython.config.cutils import import_item
35 35 from IPython.kernel.engineservice import EngineService
36 36 from IPython.kernel.config import config_manager as kernel_config_manager
37 37 from IPython.kernel.engineconnector import EngineConnector
38 38
39 39
40 40 #-------------------------------------------------------------------------------
41 41 # Code
42 42 #-------------------------------------------------------------------------------
43 43
44 44 def start_engine():
45 45 """
46 46 Start the engine, by creating it and starting the Twisted reactor.
47 47
48 48 This method does:
49 49
50 50 * If it exists, runs the `mpi_import_statement` to call `MPI_Init`
51 51 * Starts the engine logging
52 52 * Creates an IPython shell and wraps it in an `EngineService`
53 53 * Creates a `foolscap.Tub` to use in connecting to a controller.
54 54 * Uses the tub and the `EngineService` along with a Foolscap URL
55 55 (or FURL) to connect to the controller and register the engine
56 56 with the controller
57 57 """
58 58 kernel_config = kernel_config_manager.get_config_obj()
59 59 core_config = core_config_manager.get_config_obj()
60 60
61
61 62 # Execute the mpi import statement that needs to call MPI_Init
63 global mpi
62 64 mpikey = kernel_config['mpi']['default']
63 65 mpi_import_statement = kernel_config['mpi'].get(mpikey, None)
64 66 if mpi_import_statement is not None:
65 67 try:
66 exec mpi_import_statement in locals(), globals()
68 exec mpi_import_statement in globals()
67 69 except:
68 70 mpi = None
69 71 else:
70 mpi = None
72 mpi = mpi_namespace.get('mpi')
71 73
72 74 # Start logging
73 75 logfile = kernel_config['engine']['logfile']
74 76 if logfile:
75 77 logfile = logfile + str(os.getpid()) + '.log'
76 78 try:
77 79 openLogFile = open(logfile, 'w')
78 80 except:
79 81 openLogFile = sys.stdout
80 82 else:
81 83 openLogFile = sys.stdout
82 84 log.startLogging(openLogFile)
83 85
84 86 # Create the underlying shell class and EngineService
85 87 shell_class = import_item(core_config['shell']['shell_class'])
86 88 engine_service = EngineService(shell_class, mpi=mpi)
87 89 shell_import_statement = core_config['shell']['import_statement']
88 90 if shell_import_statement:
89 91 try:
90 92 engine_service.execute(shell_import_statement)
91 93 except:
92 94 log.msg("Error running import_statement: %s" % shell_import_statement)
93 95
94 96 # Create the service hierarchy
95 97 main_service = service.MultiService()
96 98 engine_service.setServiceParent(main_service)
97 99 tub_service = Tub()
98 100 tub_service.setServiceParent(main_service)
99 101 # This needs to be called before the connection is initiated
100 102 main_service.startService()
101 103
102 104 # This initiates the connection to the controller and calls
103 105 # register_engine to tell the controller we are ready to do work
104 106 engine_connector = EngineConnector(tub_service)
105 107 furl_file = kernel_config['engine']['furl_file']
106 108 d = engine_connector.connect_to_controller(engine_service, furl_file)
107 109 d.addErrback(lambda _: reactor.stop())
108 110
109 111 reactor.run()
110 112
111 113
112 114 def init_config():
113 115 """
114 116 Initialize the configuration using default and command line options.
115 117 """
116 118
117 119 parser = OptionParser()
118 120
119 121 parser.add_option(
120 122 "--furl-file",
121 123 type="string",
122 124 dest="furl_file",
123 125 help="The filename containing the FURL of the controller"
124 126 )
125 127 parser.add_option(
126 128 "--mpi",
127 129 type="string",
128 130 dest="mpi",
129 131 help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)"
130 132 )
131 133 parser.add_option(
132 134 "-l",
133 135 "--logfile",
134 136 type="string",
135 137 dest="logfile",
136 138 help="log file name (default is stdout)"
137 139 )
138 140 parser.add_option(
139 141 "--ipythondir",
140 142 type="string",
141 143 dest="ipythondir",
142 144 help="look for config files and profiles in this directory"
143 145 )
144 146
145 147 (options, args) = parser.parse_args()
146 148
147 149 kernel_config_manager.update_config_obj_from_default_file(options.ipythondir)
148 150 core_config_manager.update_config_obj_from_default_file(options.ipythondir)
149 151
150 152 kernel_config = kernel_config_manager.get_config_obj()
151 153 # Now override with command line options
152 154 if options.furl_file is not None:
153 155 kernel_config['engine']['furl_file'] = options.furl_file
154 156 if options.logfile is not None:
155 157 kernel_config['engine']['logfile'] = options.logfile
156 158 if options.mpi is not None:
157 159 kernel_config['mpi']['default'] = options.mpi
158 160
159 161
160 162 def main():
161 163 """
162 164 After creating the configuration information, start the engine.
163 165 """
164 166 init_config()
165 167 start_engine()
166 168
167 169
168 170 if __name__ == "__main__":
169 171 main()
\ No newline at end of file
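The exec change above is the crash fix named in the commit message. A plausible reading: because 'mpi = None' is assigned inside start_engine, 'mpi' is a function-local name, and 'exec ... in locals(), globals()' never binds the function's real locals, so a successful MPI import could leave the local 'mpi' unbound. The standalone Python 2 sketch below reproduces that pitfall and the fix; the faked import statement is illustrative, not IPython's actual mpi4py snippet:

    # Standalone sketch of the scoping pitfall; the "import" is faked so the
    # example runs anywhere.
    mpi = None  # module-level default, as in ipengine.py

    def broken_start():
        # 'mpi = None' in the except branch makes 'mpi' local to the whole
        # function; the exec'd assignment never reaches those locals, so a
        # successful import leaves the local name unbound.
        stmt = "mpi = 'fake MPI module'"
        try:
            exec stmt in locals(), globals()
        except:
            mpi = None
        return mpi  # UnboundLocalError when the import succeeds

    def fixed_start():
        # 'global mpi' plus exec'ing into globals() makes the imported name
        # visible both here and to the rest of the module, which is what the
        # engine needs when it later hands mpi to EngineService.
        global mpi
        stmt = "mpi = 'fake MPI module'"
        try:
            exec stmt in globals()
        except:
            mpi = None
        return mpi

    print fixed_start()          # -> fake MPI module
    try:
        broken_start()
    except UnboundLocalError, e:
        print 'broken_start failed:', e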
@@ -1,162 +1,164 b''
1 1 .. _changes:
2 2
3 3 ==========
4 4 What's new
5 5 ==========
6 6
7 7 .. contents::
8 8
9 9 Release 0.9
10 10 ===========
11 11
12 12 New features
13 13 ------------
14 14
15 15 * All of the parallel computing capabilities from `ipython1-dev` have been merged into
16 16 IPython proper. This resulted in the following new subpackages:
17 17 :mod:`IPython.kernel`, :mod:`IPython.kernel.core`, :mod:`IPython.config`,
18 18 :mod:`IPython.tools` and :mod:`IPython.testing`.
19 19 * As part of merging in the `ipython1-dev` stuff, the `setup.py` script and friends
20 20 have been completely refactored. Now we are checking for dependencies using
21 21 the approach that matplotlib uses.
22 22 * The documentation has been completely reorganized to incorporate the documentation
23 23 from `ipython1-dev`.
24 24 * We have switched to using Foolscap for all of our network protocols in
25 25 :mod:`IPython.kernel`. This gives us secure connections that are both encrypted
26 26 and authenticated.
27 27 * We have a brand new `COPYING.txt` file that describes the IPython license
28 28 and copyright. The biggest change is that we are putting "The IPython
29 29 Development Team" as the copyright holder. We give more details about exactly
30 30 what this means in this file. All developers should read this and use the new
31 31 banner in all IPython source code files.
32 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
32 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
33 33
34 34 Bug fixes
35 35 ---------
36 36
37 * The :mod:`IPython.kernel.scripts.ipengine` script was exec'ing mpi_import_statement
38 incorrectly, which was leading the engine to crash when mpi was enabled.
37 39 * A few subpackages had missing `__init__.py` files.
38 40 * The documentation is only created if Sphinx is found. Previously, the `setup.py`
39 41 script would fail if it was missing.
40 42
41 43 Backwards incompatible changes
42 44 ------------------------------
43 45
44 46 * IPython has a larger set of dependencies if you want all of its capabilities.
45 47 See the `setup.py` script for details.
46 48 * The constructors for :class:`IPython.kernel.client.MultiEngineClient` and
47 49 :class:`IPython.kernel.client.TaskClient` no longer take the (ip,port) tuple.
48 50 Instead they take the filename of a file that contains the FURL for that
49 51 client. If the FURL file is in your IPYTHONDIR, it will be found automatically
50 52 and the constructor can be left empty.
51 53 * The asynchronous clients in :mod:`IPython.kernel.asyncclient` are now created
52 54 using the factory functions :func:`get_multiengine_client` and
53 55 :func:`get_task_client`. These return a `Deferred` to the actual client.
54 56 * The command line options to `ipcontroller` and `ipengine` have changed to
55 57 reflect the new Foolscap network protocol and the FURL files. Please see the
56 58 help for these scripts for details.
57 59 * The configuration files for the kernel have changed because of the Foolscap stuff.
58 60 If you were using custom config files before, you should delete them and regenerate
59 61 new ones.
60 62
61 63 Changes merged in from IPython1
62 64 -------------------------------
63 65
64 66 New features
65 67 ............
66 68
67 69 * Much improved ``setup.py`` and ``setupegg.py`` scripts. Because Twisted
68 70 and zope.interface are now easy installable, we can declare them as dependencies
69 71 in our setupegg.py script.
70 72 * IPython is now compatible with Twisted 2.5.0 and 8.x.
71 73 * Added a new example of how to use :mod:`ipython1.kernel.asynclient`.
72 74 * Initial draft of a process daemon in :mod:`ipython1.daemon`. This has not
73 75 been merged into IPython and is still in `ipython1-dev`.
74 76 * The ``TaskController`` now has methods for getting the queue status.
75 77 * The ``TaskResult`` objects now have information about how long the task
76 78 took to run.
77 79 * We are attaching additional attributes to exceptions ``(_ipython_*)`` that
78 80 we use to carry additional info around.
79 81 * New top-level module :mod:`asyncclient` that has asynchronous versions (that
80 82 return deferreds) of the client classes. This is designed for users who want
81 83 to run their own Twisted reactor.
82 84 * All the clients in :mod:`client` are now based on Twisted. This is done by
83 85 running the Twisted reactor in a separate thread and using the
84 86 :func:`blockingCallFromThread` function that is in recent versions of Twisted.
85 87 * Functions can now be pushed/pulled to/from engines using
86 88 :meth:`MultiEngineClient.push_function` and :meth:`MultiEngineClient.pull_function`.
87 89 * Gather/scatter are now implemented in the client to reduce the work load
88 90 of the controller and improve performance.
89 91 * Complete rewrite of the IPython documentation. All of the documentation
90 92 from the IPython website has been moved into docs/source as restructured
91 93 text documents. PDF and HTML documentation are being generated using
92 94 Sphinx.
93 95 * New developer oriented documentation: development guidelines and roadmap.
94 96 * Traditional ``ChangeLog`` has been changed to a more useful ``changes.txt`` file
95 97 that is organized by release and is meant to provide something more relevant
96 98 for users.
97 99
98 100 Bug fixes
99 101 .........
100 102
101 103 * Created a proper ``MANIFEST.in`` file to create source distributions.
102 104 * Fixed a bug in the ``MultiEngine`` interface. Previously, multi-engine
103 105 actions were being collected with a :class:`DeferredList` with
104 106 ``fireononeerrback=1``. This meant that methods were returning
105 107 before all engines had given their results. This was causing extremely odd
106 108 bugs in certain cases. To fix this problem, we have 1) set
107 109 ``fireononeerrback=0`` to make sure all results (or exceptions) are in
108 110 before returning and 2) introduced a :exc:`CompositeError` exception
109 111 that wraps all of the engine exceptions. This is a huge change as it means
110 112 that users will have to catch :exc:`CompositeError` rather than the actual
111 113 exception.
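To make the practical impact of the CompositeError change concrete, here is a hedged client-side sketch; the import path and the deliberately failing statement are assumptions for illustration, not taken from the changelog:

    from IPython.kernel import client
    from IPython.kernel.error import CompositeError  # assumed import location

    mec = client.MultiEngineClient()
    try:
        mec.execute('1/0')       # fails on every engine
    except CompositeError, e:
        # Engine-side exceptions arrive wrapped; catch the composite error,
        # not ZeroDivisionError itself.
        print 'remote failure:', e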
112 114
113 115 Backwards incompatible changes
114 116 ..............................
115 117
116 118 * All names have been renamed to conform to the lowercase_with_underscore
117 119 convention. This will require users to change references to all names like
118 120 ``queueStatus`` to ``queue_status``.
119 121 * Previously, methods like :meth:`MultiEngineClient.push` and
120 122 :meth:`MultiEngineClient.pull` used ``*args`` and ``**kwargs``. This was
121 123 becoming a problem as we weren't able to introduce new keyword arguments into
122 124 the API. Now these methods simply take a dict or sequence. This has also allowed
123 125 us to get rid of the ``*All`` methods like :meth:`pushAll` and :meth:`pullAll`.
124 126 These things are now handled with the ``targets`` keyword argument that defaults
125 127 to ``'all'``.
126 128 * The :attr:`MultiEngineClient.magicTargets` has been renamed to
127 129 :attr:`MultiEngineClient.targets`.
128 130 * All methods in the MultiEngine interface now accept the optional keyword argument
129 131 ``block``.
130 132 * Renamed :class:`RemoteController` to :class:`MultiEngineClient` and
131 133 :class:`TaskController` to :class:`TaskClient`.
132 134 * Renamed the top-level module from :mod:`api` to :mod:`client`.
133 135 * Most methods in the multiengine interface now raise a :exc:`CompositeError` exception
134 136 that wraps the user's exceptions, rather than just raising the raw user's exception.
135 137 * Changed the ``setupNS`` and ``resultNames`` in the ``Task`` class to ``push``
136 138 and ``pull``.
137 139
138 140 Release 0.8.4
139 141 =============
140 142
141 143 Someone needs to describe what went into 0.8.4.
142 144
143 145 Release 0.8.2
144 146 =============
145 147
146 148 * %pushd/%popd behave differently; now "pushd /foo" pushes CURRENT directory
147 149 and jumps to /foo. The current behaviour is closer to the documented
148 150 behaviour, and should not trip anyone.
149 151
150 152 Release 0.8.3
151 153 =============
152 154
153 155 * pydb is now disabled by default (due to %run -d problems). You can enable
154 156 it by passing -pydb command line argument to IPython. Note that setting
155 157 it in config file won't work.
156 158
157 159 Older releases
158 160 ==============
159 161
160 162 Changes in earlier releases of IPython are described in the older file ``ChangeLog``.
161 163 Please refer to this document for details.
162 164