merge from trunk
Barry Wark
r1441:d4c5ae87 merge

The requested changes are too big and content was truncated.

@@ -0,0 +1,233 @@
1 # encoding: utf-8
2
3 """A parallelized version of Python's builtin map."""
4
5 __docformat__ = "restructuredtext en"
6
7 #----------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #----------------------------------------------------------------------------
13
14 #----------------------------------------------------------------------------
15 # Imports
16 #----------------------------------------------------------------------------
17
18 from types import FunctionType
19 from zope.interface import Interface, implements
20 from IPython.kernel.task import MapTask
21 from IPython.kernel.twistedutil import DeferredList, gatherBoth
22 from IPython.kernel.util import printer
23 from IPython.kernel.error import collect_exceptions
24
25 #----------------------------------------------------------------------------
26 # Code
27 #----------------------------------------------------------------------------
28
29 class IMapper(Interface):
30 """The basic interface for a Mapper.
31
32 This defines a generic interface for mapping. The idea of this is
33 similar to that of Python's builtin `map` function, which applies a function
34 elementwise to a sequence.
35 """
36
37 def map(func, *seqs):
38 """Do map in parallel.
39
40 Equivalent to map(func, *seqs) or:
41
42 [func(seqs[0][0], seqs[1][0],...), func(seqs[0][1], seqs[1][1],...),...]
43
44 :Parameters:
45 func : FunctionType
46 The function to apply to the sequence
47 sequences : tuple of iterables
48 A sequence of iterables that are used for successive function
49 arguments. This works just like map
50 """
51
52 class IMultiEngineMapperFactory(Interface):
53 """
54 An interface for something that creates `IMapper` instances.
55 """
56
57 def mapper(dist='b', targets='all', block=True):
58 """
59 Create an `IMapper` implementer with a given set of arguments.
60
61 The `IMapper` created using a multiengine controller is
62 not load balanced.
63 """
64
65 class ITaskMapperFactory(Interface):
66 """
67 An interface for something that creates `IMapper` instances.
68 """
69
70 def mapper(clear_before=False, clear_after=False, retries=0,
71 recovery_task=None, depend=None, block=True):
72 """
73 Create an `IMapper` implementer with a given set of arguments.
74
75 The `IMapper` created using a task controller is load balanced.
76
77 See the documentation for `IPython.kernel.task.BaseTask` for
78 documentation on the arguments to this method.
79 """
80
81
82 class MultiEngineMapper(object):
83 """
84 A Mapper for `IMultiEngine` implementers.
85 """
86
87 implements(IMapper)
88
89 def __init__(self, multiengine, dist='b', targets='all', block=True):
90 """
91 Create a Mapper for a multiengine.
92
93 The values of all arguments are used for all calls to `map`. This
94 class allows these arguments to be set for a series of map calls.
95
96 :Parameters:
97 multiengine : `IMultiEngine` implementer
98 The multiengine to use for running the map commands
99 dist : str
100 The type of decomposition to use. Only block ('b') is
101 supported currently
102 targets : (str, int, tuple of ints)
103 The engines to use in the map
104 block : boolean
105 Whether to block when the map is applied
106 """
107 self.multiengine = multiengine
108 self.dist = dist
109 self.targets = targets
110 self.block = block
111
112 def map(self, func, *sequences):
113 """
114 Apply func to *sequences elementwise. Like Python's builtin map.
115
116 This version is not load balanced.
117 """
118 max_len = max(len(s) for s in sequences)
119 for s in sequences:
120 if len(s)!=max_len:
121 raise ValueError('all sequences must have equal length')
122 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
123 return self.multiengine.raw_map(func, sequences, dist=self.dist,
124 targets=self.targets, block=self.block)
125
126 class TaskMapper(object):
127 """
128 Make an `ITaskController` look like an `IMapper`.
129
130 This class provides a load balanced version of `map`.
131 """
132
133 def __init__(self, task_controller, clear_before=False, clear_after=False, retries=0,
134 recovery_task=None, depend=None, block=True):
135 """
136 Create an `IMapper` given a `TaskController` and arguments.
137
138 The additional arguments are those that are common to all types of
139 tasks and are described in the documentation for
140 `IPython.kernel.task.BaseTask`.
141
142 :Parameters:
143 task_controller : an `IBlockingTaskClient` implementer
144 The `TaskController` to use for calls to `map`
145 """
146 self.task_controller = task_controller
147 self.clear_before = clear_before
148 self.clear_after = clear_after
149 self.retries = retries
150 self.recovery_task = recovery_task
151 self.depend = depend
152 self.block = block
153
154 def map(self, func, *sequences):
155 """
156 Apply func to *sequences elementwise. Like Python's builtin map.
157
158 This version is load balanced.
159 """
160 max_len = max(len(s) for s in sequences)
161 for s in sequences:
162 if len(s)!=max_len:
163 raise ValueError('all sequences must have equal length')
164 task_args = zip(*sequences)
165 task_ids = []
166 dlist = []
167 for ta in task_args:
168 task = MapTask(func, ta, clear_before=self.clear_before,
169 clear_after=self.clear_after, retries=self.retries,
170 recovery_task=self.recovery_task, depend=self.depend)
171 dlist.append(self.task_controller.run(task))
172 dlist = gatherBoth(dlist, consumeErrors=1)
173 dlist.addCallback(collect_exceptions,'map')
174 if self.block:
175 def get_results(task_ids):
176 d = self.task_controller.barrier(task_ids)
177 d.addCallback(lambda _: gatherBoth([self.task_controller.get_task_result(tid) for tid in task_ids], consumeErrors=1))
178 d.addCallback(collect_exceptions, 'map')
179 return d
180 dlist.addCallback(get_results)
181 return dlist
182
183 class SynchronousTaskMapper(object):
184 """
185 Make an `IBlockingTaskClient` look like an `IMapper`.
186
187 This class provides a load balanced version of `map`.
188 """
189
190 def __init__(self, task_controller, clear_before=False, clear_after=False, retries=0,
191 recovery_task=None, depend=None, block=True):
192 """
193 Create an `IMapper` given an `IBlockingTaskClient` and arguments.
194
195 The additional arguments are those that are common to all types of
196 tasks and are described in the documentation for
197 `IPython.kernel.task.BaseTask`.
198
199 :Parameters:
200 task_controller : an `IBlockingTaskClient` implementer
201 The `TaskController` to use for calls to `map`
202 """
203 self.task_controller = task_controller
204 self.clear_before = clear_before
205 self.clear_after = clear_after
206 self.retries = retries
207 self.recovery_task = recovery_task
208 self.depend = depend
209 self.block = block
210
211 def map(self, func, *sequences):
212 """
213 Apply func to *sequences elementwise. Like Python's builtin map.
214
215 This version is load balanced.
216 """
217 max_len = max(len(s) for s in sequences)
218 for s in sequences:
219 if len(s)!=max_len:
220 raise ValueError('all sequences must have equal length')
221 task_args = zip(*sequences)
222 task_ids = []
223 for ta in task_args:
224 task = MapTask(func, ta, clear_before=self.clear_before,
225 clear_after=self.clear_after, retries=self.retries,
226 recovery_task=self.recovery_task, depend=self.depend)
227 task_ids.append(self.task_controller.run(task))
228 if self.block:
229 self.task_controller.barrier(task_ids)
230 task_results = [self.task_controller.get_task_result(tid) for tid in task_ids]
231 return task_results
232 else:
233 return task_ids
\ No newline at end of file
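
The new mapper classes above are easiest to read from the caller's side. The sketch below is not part of this changeset: it assumes a running controller with engines, the IPython.kernel client API of this era (MultiEngineClient, TaskClient), and that those clients expose the mapper() factories described by IMultiEngineMapperFactory and ITaskMapperFactory. The function f and the option values are purely illustrative.

    # Usage sketch only -- assumes a controller and engines are already running
    # and that the clients implement the mapper() factories defined above.
    from IPython.kernel import client

    def f(x):
        return x**2

    # Non-load-balanced map through the multiengine controller.
    mec = client.MultiEngineClient()
    m = mec.mapper(dist='b', targets='all', block=True)
    print m.map(f, range(16))       # behaves like map(f, range(16)), run on the engines

    # Load-balanced map through the task controller.
    tc = client.TaskClient()
    lb = tc.mapper(retries=1, block=True)
    print lb.map(f, range(16))

Both calls should return the squared values in order; the task-based version simply submits one MapTask per element and then collects the results, as SynchronousTaskMapper.map does above.
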
@@ -1,3319 +1,3318 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Magic functions for InteractiveShell.
2 """Magic functions for InteractiveShell.
3
3
4 $Id: Magic.py 2996 2008-01-30 06:31:39Z fperez $"""
4 $Id: Magic.py 2996 2008-01-30 06:31:39Z fperez $"""
5
5
6 #*****************************************************************************
6 #*****************************************************************************
7 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
7 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
8 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
8 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #*****************************************************************************
12 #*****************************************************************************
13
13
14 #****************************************************************************
14 #****************************************************************************
15 # Modules and globals
15 # Modules and globals
16
16
17 from IPython import Release
17 from IPython import Release
18 __author__ = '%s <%s>\n%s <%s>' % \
18 __author__ = '%s <%s>\n%s <%s>' % \
19 ( Release.authors['Janko'] + Release.authors['Fernando'] )
19 ( Release.authors['Janko'] + Release.authors['Fernando'] )
20 __license__ = Release.license
20 __license__ = Release.license
21
21
22 # Python standard modules
22 # Python standard modules
23 import __builtin__
23 import __builtin__
24 import bdb
24 import bdb
25 import inspect
25 import inspect
26 import os
26 import os
27 import pdb
27 import pdb
28 import pydoc
28 import pydoc
29 import sys
29 import sys
30 import re
30 import re
31 import tempfile
31 import tempfile
32 import time
32 import time
33 import cPickle as pickle
33 import cPickle as pickle
34 import textwrap
34 import textwrap
35 from cStringIO import StringIO
35 from cStringIO import StringIO
36 from getopt import getopt,GetoptError
36 from getopt import getopt,GetoptError
37 from pprint import pprint, pformat
37 from pprint import pprint, pformat
38 from sets import Set
38 from sets import Set
39
39
40 # cProfile was added in Python2.5
40 # cProfile was added in Python2.5
41 try:
41 try:
42 import cProfile as profile
42 import cProfile as profile
43 import pstats
43 import pstats
44 except ImportError:
44 except ImportError:
45 # profile isn't bundled by default in Debian for license reasons
45 # profile isn't bundled by default in Debian for license reasons
46 try:
46 try:
47 import profile,pstats
47 import profile,pstats
48 except ImportError:
48 except ImportError:
49 profile = pstats = None
49 profile = pstats = None
50
50
51 # Homebrewed
51 # Homebrewed
52 import IPython
52 import IPython
53 from IPython import Debugger, OInspect, wildcard
53 from IPython import Debugger, OInspect, wildcard
54 from IPython.FakeModule import FakeModule
54 from IPython.FakeModule import FakeModule
55 from IPython.Itpl import Itpl, itpl, printpl,itplns
55 from IPython.Itpl import Itpl, itpl, printpl,itplns
56 from IPython.PyColorize import Parser
56 from IPython.PyColorize import Parser
57 from IPython.ipstruct import Struct
57 from IPython.ipstruct import Struct
58 from IPython.macro import Macro
58 from IPython.macro import Macro
59 from IPython.genutils import *
59 from IPython.genutils import *
60 from IPython import platutils
60 from IPython import platutils
61 import IPython.generics
61 import IPython.generics
62 import IPython.ipapi
62 import IPython.ipapi
63 from IPython.ipapi import UsageError
63 from IPython.ipapi import UsageError
64 #***************************************************************************
64 #***************************************************************************
65 # Utility functions
65 # Utility functions
66 def on_off(tag):
66 def on_off(tag):
67 """Return an ON/OFF string for a 1/0 input. Simple utility function."""
67 """Return an ON/OFF string for a 1/0 input. Simple utility function."""
68 return ['OFF','ON'][tag]
68 return ['OFF','ON'][tag]
69
69
70 class Bunch: pass
70 class Bunch: pass
71
71
72 def compress_dhist(dh):
72 def compress_dhist(dh):
73 head, tail = dh[:-10], dh[-10:]
73 head, tail = dh[:-10], dh[-10:]
74
74
75 newhead = []
75 newhead = []
76 done = Set()
76 done = Set()
77 for h in head:
77 for h in head:
78 if h in done:
78 if h in done:
79 continue
79 continue
80 newhead.append(h)
80 newhead.append(h)
81 done.add(h)
81 done.add(h)
82
82
83 return newhead + tail
83 return newhead + tail
84
84
85
85
86 #***************************************************************************
86 #***************************************************************************
87 # Main class implementing Magic functionality
87 # Main class implementing Magic functionality
88 class Magic:
88 class Magic:
89 """Magic functions for InteractiveShell.
89 """Magic functions for InteractiveShell.
90
90
91 Shell functions which can be reached as %function_name. All magic
91 Shell functions which can be reached as %function_name. All magic
92 functions should accept a string, which they can parse for their own
92 functions should accept a string, which they can parse for their own
93 needs. This can make some functions easier to type, eg `%cd ../`
93 needs. This can make some functions easier to type, eg `%cd ../`
94 vs. `%cd("../")`
94 vs. `%cd("../")`
95
95
96 ALL definitions MUST begin with the prefix magic_. The user won't need it
96 ALL definitions MUST begin with the prefix magic_. The user won't need it
97 at the command line, but it is needed in the definition. """
97 at the command line, but it is needed in the definition. """
98
98
99 # class globals
99 # class globals
100 auto_status = ['Automagic is OFF, % prefix IS needed for magic functions.',
100 auto_status = ['Automagic is OFF, % prefix IS needed for magic functions.',
101 'Automagic is ON, % prefix NOT needed for magic functions.']
101 'Automagic is ON, % prefix NOT needed for magic functions.']
102
102
103 #......................................................................
103 #......................................................................
104 # some utility functions
104 # some utility functions
105
105
106 def __init__(self,shell):
106 def __init__(self,shell):
107
107
108 self.options_table = {}
108 self.options_table = {}
109 if profile is None:
109 if profile is None:
110 self.magic_prun = self.profile_missing_notice
110 self.magic_prun = self.profile_missing_notice
111 self.shell = shell
111 self.shell = shell
112
112
113 # namespace for holding state we may need
113 # namespace for holding state we may need
114 self._magic_state = Bunch()
114 self._magic_state = Bunch()
115
115
116 def profile_missing_notice(self, *args, **kwargs):
116 def profile_missing_notice(self, *args, **kwargs):
117 error("""\
117 error("""\
118 The profile module could not be found. It has been removed from the standard
118 The profile module could not be found. It has been removed from the standard
119 python packages because of its non-free license. To use profiling, install the
119 python packages because of its non-free license. To use profiling, install the
120 python-profiler package from non-free.""")
120 python-profiler package from non-free.""")
121
121
122 def default_option(self,fn,optstr):
122 def default_option(self,fn,optstr):
123 """Make an entry in the options_table for fn, with value optstr"""
123 """Make an entry in the options_table for fn, with value optstr"""
124
124
125 if fn not in self.lsmagic():
125 if fn not in self.lsmagic():
126 error("%s is not a magic function" % fn)
126 error("%s is not a magic function" % fn)
127 self.options_table[fn] = optstr
127 self.options_table[fn] = optstr
128
128
129 def lsmagic(self):
129 def lsmagic(self):
130 """Return a list of currently available magic functions.
130 """Return a list of currently available magic functions.
131
131
132 Gives a list of the bare names after mangling (['ls','cd', ...], not
132 Gives a list of the bare names after mangling (['ls','cd', ...], not
133 ['magic_ls','magic_cd',...]"""
133 ['magic_ls','magic_cd',...]"""
134
134
135 # FIXME. This needs a cleanup, in the way the magics list is built.
135 # FIXME. This needs a cleanup, in the way the magics list is built.
136
136
137 # magics in class definition
137 # magics in class definition
138 class_magic = lambda fn: fn.startswith('magic_') and \
138 class_magic = lambda fn: fn.startswith('magic_') and \
139 callable(Magic.__dict__[fn])
139 callable(Magic.__dict__[fn])
140 # in instance namespace (run-time user additions)
140 # in instance namespace (run-time user additions)
141 inst_magic = lambda fn: fn.startswith('magic_') and \
141 inst_magic = lambda fn: fn.startswith('magic_') and \
142 callable(self.__dict__[fn])
142 callable(self.__dict__[fn])
143 # and bound magics by user (so they can access self):
143 # and bound magics by user (so they can access self):
144 inst_bound_magic = lambda fn: fn.startswith('magic_') and \
144 inst_bound_magic = lambda fn: fn.startswith('magic_') and \
145 callable(self.__class__.__dict__[fn])
145 callable(self.__class__.__dict__[fn])
146 magics = filter(class_magic,Magic.__dict__.keys()) + \
146 magics = filter(class_magic,Magic.__dict__.keys()) + \
147 filter(inst_magic,self.__dict__.keys()) + \
147 filter(inst_magic,self.__dict__.keys()) + \
148 filter(inst_bound_magic,self.__class__.__dict__.keys())
148 filter(inst_bound_magic,self.__class__.__dict__.keys())
149 out = []
149 out = []
150 for fn in Set(magics):
150 for fn in Set(magics):
151 out.append(fn.replace('magic_','',1))
151 out.append(fn.replace('magic_','',1))
152 out.sort()
152 out.sort()
153 return out
153 return out
154
154
155 def extract_input_slices(self,slices,raw=False):
155 def extract_input_slices(self,slices,raw=False):
156 """Return as a string a set of input history slices.
156 """Return as a string a set of input history slices.
157
157
158 Inputs:
158 Inputs:
159
159
160 - slices: the set of slices is given as a list of strings (like
160 - slices: the set of slices is given as a list of strings (like
161 ['1','4:8','9']), since this function is for use by magic functions
161 ['1','4:8','9']), since this function is for use by magic functions
162 which get their arguments as strings.
162 which get their arguments as strings.
163
163
164 Optional inputs:
164 Optional inputs:
165
165
166 - raw(False): by default, the processed input is used. If this is
166 - raw(False): by default, the processed input is used. If this is
167 true, the raw input history is used instead.
167 true, the raw input history is used instead.
168
168
169 Note that slices can be called with two notations:
169 Note that slices can be called with two notations:
170
170
171 N:M -> standard python form, means including items N...(M-1).
171 N:M -> standard python form, means including items N...(M-1).
172
172
173 N-M -> include items N..M (closed endpoint)."""
173 N-M -> include items N..M (closed endpoint)."""
174
174
175 if raw:
175 if raw:
176 hist = self.shell.input_hist_raw
176 hist = self.shell.input_hist_raw
177 else:
177 else:
178 hist = self.shell.input_hist
178 hist = self.shell.input_hist
179
179
180 cmds = []
180 cmds = []
181 for chunk in slices:
181 for chunk in slices:
182 if ':' in chunk:
182 if ':' in chunk:
183 ini,fin = map(int,chunk.split(':'))
183 ini,fin = map(int,chunk.split(':'))
184 elif '-' in chunk:
184 elif '-' in chunk:
185 ini,fin = map(int,chunk.split('-'))
185 ini,fin = map(int,chunk.split('-'))
186 fin += 1
186 fin += 1
187 else:
187 else:
188 ini = int(chunk)
188 ini = int(chunk)
189 fin = ini+1
189 fin = ini+1
190 cmds.append(hist[ini:fin])
190 cmds.append(hist[ini:fin])
191 return cmds
191 return cmds
192
192
193 def _ofind(self, oname, namespaces=None):
193 def _ofind(self, oname, namespaces=None):
194 """Find an object in the available namespaces.
194 """Find an object in the available namespaces.
195
195
196 self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
196 self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
197
197
198 Has special code to detect magic functions.
198 Has special code to detect magic functions.
199 """
199 """
200
200
201 oname = oname.strip()
201 oname = oname.strip()
202
202
203 alias_ns = None
203 alias_ns = None
204 if namespaces is None:
204 if namespaces is None:
205 # Namespaces to search in:
205 # Namespaces to search in:
206 # Put them in a list. The order is important so that we
206 # Put them in a list. The order is important so that we
207 # find things in the same order that Python finds them.
207 # find things in the same order that Python finds them.
208 namespaces = [ ('Interactive', self.shell.user_ns),
208 namespaces = [ ('Interactive', self.shell.user_ns),
209 ('IPython internal', self.shell.internal_ns),
209 ('IPython internal', self.shell.internal_ns),
210 ('Python builtin', __builtin__.__dict__),
210 ('Python builtin', __builtin__.__dict__),
211 ('Alias', self.shell.alias_table),
211 ('Alias', self.shell.alias_table),
212 ]
212 ]
213 alias_ns = self.shell.alias_table
213 alias_ns = self.shell.alias_table
214
214
215 # initialize results to 'null'
215 # initialize results to 'null'
216 found = 0; obj = None; ospace = None; ds = None;
216 found = 0; obj = None; ospace = None; ds = None;
217 ismagic = 0; isalias = 0; parent = None
217 ismagic = 0; isalias = 0; parent = None
218
218
219 # Look for the given name by splitting it in parts. If the head is
219 # Look for the given name by splitting it in parts. If the head is
220 # found, then we look for all the remaining parts as members, and only
220 # found, then we look for all the remaining parts as members, and only
221 # declare success if we can find them all.
221 # declare success if we can find them all.
222 oname_parts = oname.split('.')
222 oname_parts = oname.split('.')
223 oname_head, oname_rest = oname_parts[0],oname_parts[1:]
223 oname_head, oname_rest = oname_parts[0],oname_parts[1:]
224 for nsname,ns in namespaces:
224 for nsname,ns in namespaces:
225 try:
225 try:
226 obj = ns[oname_head]
226 obj = ns[oname_head]
227 except KeyError:
227 except KeyError:
228 continue
228 continue
229 else:
229 else:
230 #print 'oname_rest:', oname_rest # dbg
230 #print 'oname_rest:', oname_rest # dbg
231 for part in oname_rest:
231 for part in oname_rest:
232 try:
232 try:
233 parent = obj
233 parent = obj
234 obj = getattr(obj,part)
234 obj = getattr(obj,part)
235 except:
235 except:
236 # Blanket except b/c some badly implemented objects
236 # Blanket except b/c some badly implemented objects
237 # allow __getattr__ to raise exceptions other than
237 # allow __getattr__ to raise exceptions other than
238 # AttributeError, which then crashes IPython.
238 # AttributeError, which then crashes IPython.
239 break
239 break
240 else:
240 else:
241 # If we finish the for loop (no break), we got all members
241 # If we finish the for loop (no break), we got all members
242 found = 1
242 found = 1
243 ospace = nsname
243 ospace = nsname
244 if ns == alias_ns:
244 if ns == alias_ns:
245 isalias = 1
245 isalias = 1
246 break # namespace loop
246 break # namespace loop
247
247
248 # Try to see if it's magic
248 # Try to see if it's magic
249 if not found:
249 if not found:
250 if oname.startswith(self.shell.ESC_MAGIC):
250 if oname.startswith(self.shell.ESC_MAGIC):
251 oname = oname[1:]
251 oname = oname[1:]
252 obj = getattr(self,'magic_'+oname,None)
252 obj = getattr(self,'magic_'+oname,None)
253 if obj is not None:
253 if obj is not None:
254 found = 1
254 found = 1
255 ospace = 'IPython internal'
255 ospace = 'IPython internal'
256 ismagic = 1
256 ismagic = 1
257
257
258 # Last try: special-case some literals like '', [], {}, etc:
258 # Last try: special-case some literals like '', [], {}, etc:
259 if not found and oname_head in ["''",'""','[]','{}','()']:
259 if not found and oname_head in ["''",'""','[]','{}','()']:
260 obj = eval(oname_head)
260 obj = eval(oname_head)
261 found = 1
261 found = 1
262 ospace = 'Interactive'
262 ospace = 'Interactive'
263
263
264 return {'found':found, 'obj':obj, 'namespace':ospace,
264 return {'found':found, 'obj':obj, 'namespace':ospace,
265 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
265 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
266
266
267 def arg_err(self,func):
267 def arg_err(self,func):
268 """Print docstring if incorrect arguments were passed"""
268 """Print docstring if incorrect arguments were passed"""
269 print 'Error in arguments:'
269 print 'Error in arguments:'
270 print OInspect.getdoc(func)
270 print OInspect.getdoc(func)
271
271
272 def format_latex(self,strng):
272 def format_latex(self,strng):
273 """Format a string for latex inclusion."""
273 """Format a string for latex inclusion."""
274
274
275 # Characters that need to be escaped for latex:
275 # Characters that need to be escaped for latex:
276 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
276 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
277 # Magic command names as headers:
277 # Magic command names as headers:
278 cmd_name_re = re.compile(r'^(%s.*?):' % self.shell.ESC_MAGIC,
278 cmd_name_re = re.compile(r'^(%s.*?):' % self.shell.ESC_MAGIC,
279 re.MULTILINE)
279 re.MULTILINE)
280 # Magic commands
280 # Magic commands
281 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % self.shell.ESC_MAGIC,
281 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % self.shell.ESC_MAGIC,
282 re.MULTILINE)
282 re.MULTILINE)
283 # Paragraph continue
283 # Paragraph continue
284 par_re = re.compile(r'\\$',re.MULTILINE)
284 par_re = re.compile(r'\\$',re.MULTILINE)
285
285
286 # The "\n" symbol
286 # The "\n" symbol
287 newline_re = re.compile(r'\\n')
287 newline_re = re.compile(r'\\n')
288
288
289 # Now build the string for output:
289 # Now build the string for output:
290 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
290 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
291 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
291 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
292 strng)
292 strng)
293 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
293 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
294 strng = par_re.sub(r'\\\\',strng)
294 strng = par_re.sub(r'\\\\',strng)
295 strng = escape_re.sub(r'\\\1',strng)
295 strng = escape_re.sub(r'\\\1',strng)
296 strng = newline_re.sub(r'\\textbackslash{}n',strng)
296 strng = newline_re.sub(r'\\textbackslash{}n',strng)
297 return strng
297 return strng
298
298
299 def format_screen(self,strng):
299 def format_screen(self,strng):
300 """Format a string for screen printing.
300 """Format a string for screen printing.
301
301
302 This removes some latex-type format codes."""
302 This removes some latex-type format codes."""
303 # Paragraph continue
303 # Paragraph continue
304 par_re = re.compile(r'\\$',re.MULTILINE)
304 par_re = re.compile(r'\\$',re.MULTILINE)
305 strng = par_re.sub('',strng)
305 strng = par_re.sub('',strng)
306 return strng
306 return strng
307
307
308 def parse_options(self,arg_str,opt_str,*long_opts,**kw):
308 def parse_options(self,arg_str,opt_str,*long_opts,**kw):
309 """Parse options passed to an argument string.
309 """Parse options passed to an argument string.
310
310
311 The interface is similar to that of getopt(), but it returns back a
311 The interface is similar to that of getopt(), but it returns back a
312 Struct with the options as keys and the stripped argument string still
312 Struct with the options as keys and the stripped argument string still
313 as a string.
313 as a string.
314
314
315 arg_str is quoted as a true sys.argv vector by using shlex.split.
315 arg_str is quoted as a true sys.argv vector by using shlex.split.
316 This allows us to easily expand variables, glob files, quote
316 This allows us to easily expand variables, glob files, quote
317 arguments, etc.
317 arguments, etc.
318
318
319 Options:
319 Options:
320 -mode: default 'string'. If given as 'list', the argument string is
320 -mode: default 'string'. If given as 'list', the argument string is
321 returned as a list (split on whitespace) instead of a string.
321 returned as a list (split on whitespace) instead of a string.
322
322
323 -list_all: put all option values in lists. Normally only options
323 -list_all: put all option values in lists. Normally only options
324 appearing more than once are put in a list.
324 appearing more than once are put in a list.
325
325
326 -posix (True): whether to split the input line in POSIX mode or not,
326 -posix (True): whether to split the input line in POSIX mode or not,
327 as per the conventions outlined in the shlex module from the
327 as per the conventions outlined in the shlex module from the
328 standard library."""
328 standard library."""
329
329
330 # inject default options at the beginning of the input line
330 # inject default options at the beginning of the input line
331 caller = sys._getframe(1).f_code.co_name.replace('magic_','')
331 caller = sys._getframe(1).f_code.co_name.replace('magic_','')
332 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
332 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
333
333
334 mode = kw.get('mode','string')
334 mode = kw.get('mode','string')
335 if mode not in ['string','list']:
335 if mode not in ['string','list']:
336 raise ValueError,'incorrect mode given: %s' % mode
336 raise ValueError,'incorrect mode given: %s' % mode
337 # Get options
337 # Get options
338 list_all = kw.get('list_all',0)
338 list_all = kw.get('list_all',0)
339 posix = kw.get('posix',True)
339 posix = kw.get('posix',True)
340
340
341 # Check if we have more than one argument to warrant extra processing:
341 # Check if we have more than one argument to warrant extra processing:
342 odict = {} # Dictionary with options
342 odict = {} # Dictionary with options
343 args = arg_str.split()
343 args = arg_str.split()
344 if len(args) >= 1:
344 if len(args) >= 1:
345 # If the list of inputs only has 0 or 1 thing in it, there's no
345 # If the list of inputs only has 0 or 1 thing in it, there's no
346 # need to look for options
346 # need to look for options
347 argv = arg_split(arg_str,posix)
347 argv = arg_split(arg_str,posix)
348 # Do regular option processing
348 # Do regular option processing
349 try:
349 try:
350 opts,args = getopt(argv,opt_str,*long_opts)
350 opts,args = getopt(argv,opt_str,*long_opts)
351 except GetoptError,e:
351 except GetoptError,e:
352 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
352 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
353 " ".join(long_opts)))
353 " ".join(long_opts)))
354 for o,a in opts:
354 for o,a in opts:
355 if o.startswith('--'):
355 if o.startswith('--'):
356 o = o[2:]
356 o = o[2:]
357 else:
357 else:
358 o = o[1:]
358 o = o[1:]
359 try:
359 try:
360 odict[o].append(a)
360 odict[o].append(a)
361 except AttributeError:
361 except AttributeError:
362 odict[o] = [odict[o],a]
362 odict[o] = [odict[o],a]
363 except KeyError:
363 except KeyError:
364 if list_all:
364 if list_all:
365 odict[o] = [a]
365 odict[o] = [a]
366 else:
366 else:
367 odict[o] = a
367 odict[o] = a
368
368
369 # Prepare opts,args for return
369 # Prepare opts,args for return
370 opts = Struct(odict)
370 opts = Struct(odict)
371 if mode == 'string':
371 if mode == 'string':
372 args = ' '.join(args)
372 args = ' '.join(args)
373
373
374 return opts,args
374 return opts,args
375
375
376 #......................................................................
376 #......................................................................
377 # And now the actual magic functions
377 # And now the actual magic functions
378
378
379 # Functions for IPython shell work (vars,funcs, config, etc)
379 # Functions for IPython shell work (vars,funcs, config, etc)
380 def magic_lsmagic(self, parameter_s = ''):
380 def magic_lsmagic(self, parameter_s = ''):
381 """List currently available magic functions."""
381 """List currently available magic functions."""
382 mesc = self.shell.ESC_MAGIC
382 mesc = self.shell.ESC_MAGIC
383 print 'Available magic functions:\n'+mesc+\
383 print 'Available magic functions:\n'+mesc+\
384 (' '+mesc).join(self.lsmagic())
384 (' '+mesc).join(self.lsmagic())
385 print '\n' + Magic.auto_status[self.shell.rc.automagic]
385 print '\n' + Magic.auto_status[self.shell.rc.automagic]
386 return None
386 return None
387
387
388 def magic_magic(self, parameter_s = ''):
388 def magic_magic(self, parameter_s = ''):
389 """Print information about the magic function system.
389 """Print information about the magic function system.
390
390
391 Supported formats: -latex, -brief, -rest
391 Supported formats: -latex, -brief, -rest
392 """
392 """
393
393
394 mode = ''
394 mode = ''
395 try:
395 try:
396 if parameter_s.split()[0] == '-latex':
396 if parameter_s.split()[0] == '-latex':
397 mode = 'latex'
397 mode = 'latex'
398 if parameter_s.split()[0] == '-brief':
398 if parameter_s.split()[0] == '-brief':
399 mode = 'brief'
399 mode = 'brief'
400 if parameter_s.split()[0] == '-rest':
400 if parameter_s.split()[0] == '-rest':
401 mode = 'rest'
401 mode = 'rest'
402 rest_docs = []
402 rest_docs = []
403 except:
403 except:
404 pass
404 pass
405
405
406 magic_docs = []
406 magic_docs = []
407 for fname in self.lsmagic():
407 for fname in self.lsmagic():
408 mname = 'magic_' + fname
408 mname = 'magic_' + fname
409 for space in (Magic,self,self.__class__):
409 for space in (Magic,self,self.__class__):
410 try:
410 try:
411 fn = space.__dict__[mname]
411 fn = space.__dict__[mname]
412 except KeyError:
412 except KeyError:
413 pass
413 pass
414 else:
414 else:
415 break
415 break
416 if mode == 'brief':
416 if mode == 'brief':
417 # only first line
417 # only first line
418 if fn.__doc__:
418 if fn.__doc__:
419 fndoc = fn.__doc__.split('\n',1)[0]
419 fndoc = fn.__doc__.split('\n',1)[0]
420 else:
420 else:
421 fndoc = 'No documentation'
421 fndoc = 'No documentation'
422 else:
422 else:
423 fndoc = fn.__doc__.rstrip()
423 fndoc = fn.__doc__.rstrip()
424
424
425 if mode == 'rest':
425 if mode == 'rest':
426 rest_docs.append('**%s%s**::\n\n\t%s\n\n' %(self.shell.ESC_MAGIC,
426 rest_docs.append('**%s%s**::\n\n\t%s\n\n' %(self.shell.ESC_MAGIC,
427 fname,fndoc))
427 fname,fndoc))
428
428
429 else:
429 else:
430 magic_docs.append('%s%s:\n\t%s\n' %(self.shell.ESC_MAGIC,
430 magic_docs.append('%s%s:\n\t%s\n' %(self.shell.ESC_MAGIC,
431 fname,fndoc))
431 fname,fndoc))
432
432
433 magic_docs = ''.join(magic_docs)
433 magic_docs = ''.join(magic_docs)
434
434
435 if mode == 'rest':
435 if mode == 'rest':
436 return "".join(rest_docs)
436 return "".join(rest_docs)
437
437
438 if mode == 'latex':
438 if mode == 'latex':
439 print self.format_latex(magic_docs)
439 print self.format_latex(magic_docs)
440 return
440 return
441 else:
441 else:
442 magic_docs = self.format_screen(magic_docs)
442 magic_docs = self.format_screen(magic_docs)
443 if mode == 'brief':
443 if mode == 'brief':
444 return magic_docs
444 return magic_docs
445
445
446 outmsg = """
446 outmsg = """
447 IPython's 'magic' functions
447 IPython's 'magic' functions
448 ===========================
448 ===========================
449
449
450 The magic function system provides a series of functions which allow you to
450 The magic function system provides a series of functions which allow you to
451 control the behavior of IPython itself, plus a lot of system-type
451 control the behavior of IPython itself, plus a lot of system-type
452 features. All these functions are prefixed with a % character, but parameters
452 features. All these functions are prefixed with a % character, but parameters
453 are given without parentheses or quotes.
453 are given without parentheses or quotes.
454
454
455 NOTE: If you have 'automagic' enabled (via the command line option or with the
455 NOTE: If you have 'automagic' enabled (via the command line option or with the
456 %automagic function), you don't need to type in the % explicitly. By default,
456 %automagic function), you don't need to type in the % explicitly. By default,
457 IPython ships with automagic on, so you should only rarely need the % escape.
457 IPython ships with automagic on, so you should only rarely need the % escape.
458
458
459 Example: typing '%cd mydir' (without the quotes) changes your working directory
459 Example: typing '%cd mydir' (without the quotes) changes your working directory
460 to 'mydir', if it exists.
460 to 'mydir', if it exists.
461
461
462 You can define your own magic functions to extend the system. See the supplied
462 You can define your own magic functions to extend the system. See the supplied
463 ipythonrc and example-magic.py files for details (in your ipython
463 ipythonrc and example-magic.py files for details (in your ipython
464 configuration directory, typically $HOME/.ipython/).
464 configuration directory, typically $HOME/.ipython/).
465
465
466 You can also define your own aliased names for magic functions. In your
466 You can also define your own aliased names for magic functions. In your
467 ipythonrc file, placing a line like:
467 ipythonrc file, placing a line like:
468
468
469 execute __IPYTHON__.magic_pf = __IPYTHON__.magic_profile
469 execute __IPYTHON__.magic_pf = __IPYTHON__.magic_profile
470
470
471 will define %pf as a new name for %profile.
471 will define %pf as a new name for %profile.
472
472
473 You can also call magics in code using the ipmagic() function, which IPython
473 You can also call magics in code using the ipmagic() function, which IPython
474 automatically adds to the builtin namespace. Type 'ipmagic?' for details.
474 automatically adds to the builtin namespace. Type 'ipmagic?' for details.
475
475
476 For a list of the available magic functions, use %lsmagic. For a description
476 For a list of the available magic functions, use %lsmagic. For a description
477 of any of them, type %magic_name?, e.g. '%cd?'.
477 of any of them, type %magic_name?, e.g. '%cd?'.
478
478
479 Currently the magic system has the following functions:\n"""
479 Currently the magic system has the following functions:\n"""
480
480
481 mesc = self.shell.ESC_MAGIC
481 mesc = self.shell.ESC_MAGIC
482 outmsg = ("%s\n%s\n\nSummary of magic functions (from %slsmagic):"
482 outmsg = ("%s\n%s\n\nSummary of magic functions (from %slsmagic):"
483 "\n\n%s%s\n\n%s" % (outmsg,
483 "\n\n%s%s\n\n%s" % (outmsg,
484 magic_docs,mesc,mesc,
484 magic_docs,mesc,mesc,
485 (' '+mesc).join(self.lsmagic()),
485 (' '+mesc).join(self.lsmagic()),
486 Magic.auto_status[self.shell.rc.automagic] ) )
486 Magic.auto_status[self.shell.rc.automagic] ) )
487
487
488 page(outmsg,screen_lines=self.shell.rc.screen_length)
488 page(outmsg,screen_lines=self.shell.rc.screen_length)
489
489
490
490
491 def magic_autoindent(self, parameter_s = ''):
491 def magic_autoindent(self, parameter_s = ''):
492 """Toggle autoindent on/off (if available)."""
492 """Toggle autoindent on/off (if available)."""
493
493
494 self.shell.set_autoindent()
494 self.shell.set_autoindent()
495 print "Automatic indentation is:",['OFF','ON'][self.shell.autoindent]
495 print "Automatic indentation is:",['OFF','ON'][self.shell.autoindent]
496
496
497
497
498 def magic_automagic(self, parameter_s = ''):
498 def magic_automagic(self, parameter_s = ''):
499 """Make magic functions callable without having to type the initial %.
499 """Make magic functions callable without having to type the initial %.
500
500
501 Without arguments, it toggles on/off (when off, you must call it as
501 Without arguments, it toggles on/off (when off, you must call it as
502 %automagic, of course). With arguments it sets the value, and you can
502 %automagic, of course). With arguments it sets the value, and you can
503 use any of (case insensitive):
503 use any of (case insensitive):
504
504
505 - on,1,True: to activate
505 - on,1,True: to activate
506
506
507 - off,0,False: to deactivate.
507 - off,0,False: to deactivate.
508
508
509 Note that magic functions have lowest priority, so if there's a
509 Note that magic functions have lowest priority, so if there's a
510 variable whose name collides with that of a magic fn, automagic won't
510 variable whose name collides with that of a magic fn, automagic won't
511 work for that function (you get the variable instead). However, if you
511 work for that function (you get the variable instead). However, if you
512 delete the variable (del var), the previously shadowed magic function
512 delete the variable (del var), the previously shadowed magic function
513 becomes visible to automagic again."""
513 becomes visible to automagic again."""
514
514
515 rc = self.shell.rc
515 rc = self.shell.rc
516 arg = parameter_s.lower()
516 arg = parameter_s.lower()
517 if parameter_s in ('on','1','true'):
517 if parameter_s in ('on','1','true'):
518 rc.automagic = True
518 rc.automagic = True
519 elif parameter_s in ('off','0','false'):
519 elif parameter_s in ('off','0','false'):
520 rc.automagic = False
520 rc.automagic = False
521 else:
521 else:
522 rc.automagic = not rc.automagic
522 rc.automagic = not rc.automagic
523 print '\n' + Magic.auto_status[rc.automagic]
523 print '\n' + Magic.auto_status[rc.automagic]
524
524
525
525
526 def magic_autocall(self, parameter_s = ''):
526 def magic_autocall(self, parameter_s = ''):
527 """Make functions callable without having to type parentheses.
527 """Make functions callable without having to type parentheses.
528
528
529 Usage:
529 Usage:
530
530
531 %autocall [mode]
531 %autocall [mode]
532
532
533 The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
533 The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
534 value is toggled on and off (remembering the previous state).
534 value is toggled on and off (remembering the previous state).
535
535
536 In more detail, these values mean:
536 In more detail, these values mean:
537
537
538 0 -> fully disabled
538 0 -> fully disabled
539
539
540 1 -> active, but do not apply if there are no arguments on the line.
540 1 -> active, but do not apply if there are no arguments on the line.
541
541
542 In this mode, you get:
542 In this mode, you get:
543
543
544 In [1]: callable
544 In [1]: callable
545 Out[1]: <built-in function callable>
545 Out[1]: <built-in function callable>
546
546
547 In [2]: callable 'hello'
547 In [2]: callable 'hello'
548 ------> callable('hello')
548 ------> callable('hello')
549 Out[2]: False
549 Out[2]: False
550
550
551 2 -> Active always. Even if no arguments are present, the callable
551 2 -> Active always. Even if no arguments are present, the callable
552 object is called:
552 object is called:
553
553
554 In [4]: callable
554 In [4]: callable
555 ------> callable()
555 ------> callable()
556
556
557 Note that even with autocall off, you can still use '/' at the start of
557 Note that even with autocall off, you can still use '/' at the start of
558 a line to treat the first argument on the command line as a function
558 a line to treat the first argument on the command line as a function
559 and add parentheses to it:
559 and add parentheses to it:
560
560
561 In [8]: /str 43
561 In [8]: /str 43
562 ------> str(43)
562 ------> str(43)
563 Out[8]: '43'
563 Out[8]: '43'
564 """
564 """
565
565
566 rc = self.shell.rc
566 rc = self.shell.rc
567
567
568 if parameter_s:
568 if parameter_s:
569 arg = int(parameter_s)
569 arg = int(parameter_s)
570 else:
570 else:
571 arg = 'toggle'
571 arg = 'toggle'
572
572
573 if not arg in (0,1,2,'toggle'):
573 if not arg in (0,1,2,'toggle'):
574 error('Valid modes: 0->Off, 1->Smart, 2->Full')
574 error('Valid modes: 0->Off, 1->Smart, 2->Full')
575 return
575 return
576
576
577 if arg in (0,1,2):
577 if arg in (0,1,2):
578 rc.autocall = arg
578 rc.autocall = arg
579 else: # toggle
579 else: # toggle
580 if rc.autocall:
580 if rc.autocall:
581 self._magic_state.autocall_save = rc.autocall
581 self._magic_state.autocall_save = rc.autocall
582 rc.autocall = 0
582 rc.autocall = 0
583 else:
583 else:
584 try:
584 try:
585 rc.autocall = self._magic_state.autocall_save
585 rc.autocall = self._magic_state.autocall_save
586 except AttributeError:
586 except AttributeError:
587 rc.autocall = self._magic_state.autocall_save = 1
587 rc.autocall = self._magic_state.autocall_save = 1
588
588
589 print "Automatic calling is:",['OFF','Smart','Full'][rc.autocall]
589 print "Automatic calling is:",['OFF','Smart','Full'][rc.autocall]
590
590
591 def magic_system_verbose(self, parameter_s = ''):
591 def magic_system_verbose(self, parameter_s = ''):
592 """Set verbose printing of system calls.
592 """Set verbose printing of system calls.
593
593
594 If called without an argument, act as a toggle"""
594 If called without an argument, act as a toggle"""
595
595
596 if parameter_s:
596 if parameter_s:
597 val = bool(eval(parameter_s))
597 val = bool(eval(parameter_s))
598 else:
598 else:
599 val = None
599 val = None
600
600
601 self.shell.rc_set_toggle('system_verbose',val)
601 self.shell.rc_set_toggle('system_verbose',val)
602 print "System verbose printing is:",\
602 print "System verbose printing is:",\
603 ['OFF','ON'][self.shell.rc.system_verbose]
603 ['OFF','ON'][self.shell.rc.system_verbose]
604
604
605
605
606 def magic_page(self, parameter_s=''):
606 def magic_page(self, parameter_s=''):
607 """Pretty print the object and display it through a pager.
607 """Pretty print the object and display it through a pager.
608
608
609 %page [options] OBJECT
609 %page [options] OBJECT
610
610
611 If no object is given, use _ (last output).
611 If no object is given, use _ (last output).
612
612
613 Options:
613 Options:
614
614
615 -r: page str(object), don't pretty-print it."""
615 -r: page str(object), don't pretty-print it."""
616
616
617 # After a function contributed by Olivier Aubert, slightly modified.
617 # After a function contributed by Olivier Aubert, slightly modified.
618
618
619 # Process options/args
619 # Process options/args
620 opts,args = self.parse_options(parameter_s,'r')
620 opts,args = self.parse_options(parameter_s,'r')
621 raw = 'r' in opts
621 raw = 'r' in opts
622
622
623 oname = args and args or '_'
623 oname = args and args or '_'
624 info = self._ofind(oname)
624 info = self._ofind(oname)
625 if info['found']:
625 if info['found']:
626 txt = (raw and str or pformat)( info['obj'] )
626 txt = (raw and str or pformat)( info['obj'] )
627 page(txt)
627 page(txt)
628 else:
628 else:
629 print 'Object `%s` not found' % oname
629 print 'Object `%s` not found' % oname
630
630
631 def magic_profile(self, parameter_s=''):
631 def magic_profile(self, parameter_s=''):
632 """Print your currently active IPyhton profile."""
632 """Print your currently active IPyhton profile."""
633 if self.shell.rc.profile:
633 if self.shell.rc.profile:
634 printpl('Current IPython profile: $self.shell.rc.profile.')
634 printpl('Current IPython profile: $self.shell.rc.profile.')
635 else:
635 else:
636 print 'No profile active.'
636 print 'No profile active.'
637
637
638 def magic_pinfo(self, parameter_s='', namespaces=None):
638 def magic_pinfo(self, parameter_s='', namespaces=None):
639 """Provide detailed information about an object.
639 """Provide detailed information about an object.
640
640
641 '%pinfo object' is just a synonym for object? or ?object."""
641 '%pinfo object' is just a synonym for object? or ?object."""
642
642
643 #print 'pinfo par: <%s>' % parameter_s # dbg
643 #print 'pinfo par: <%s>' % parameter_s # dbg
644
644
645
645
646 # detail_level: 0 -> obj? , 1 -> obj??
646 # detail_level: 0 -> obj? , 1 -> obj??
647 detail_level = 0
647 detail_level = 0
648 # We need to detect if we got called as 'pinfo pinfo foo', which can
648 # We need to detect if we got called as 'pinfo pinfo foo', which can
649 # happen if the user types 'pinfo foo?' at the cmd line.
649 # happen if the user types 'pinfo foo?' at the cmd line.
650 pinfo,qmark1,oname,qmark2 = \
650 pinfo,qmark1,oname,qmark2 = \
651 re.match('(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups()
651 re.match('(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups()
652 if pinfo or qmark1 or qmark2:
652 if pinfo or qmark1 or qmark2:
653 detail_level = 1
653 detail_level = 1
654 if "*" in oname:
654 if "*" in oname:
655 self.magic_psearch(oname)
655 self.magic_psearch(oname)
656 else:
656 else:
657 self._inspect('pinfo', oname, detail_level=detail_level,
657 self._inspect('pinfo', oname, detail_level=detail_level,
658 namespaces=namespaces)
658 namespaces=namespaces)
659
659
660 def magic_pdef(self, parameter_s='', namespaces=None):
660 def magic_pdef(self, parameter_s='', namespaces=None):
661 """Print the definition header for any callable object.
661 """Print the definition header for any callable object.
662
662
663 If the object is a class, print the constructor information."""
663 If the object is a class, print the constructor information."""
664 self._inspect('pdef',parameter_s, namespaces)
664 self._inspect('pdef',parameter_s, namespaces)
665
665
666 def magic_pdoc(self, parameter_s='', namespaces=None):
666 def magic_pdoc(self, parameter_s='', namespaces=None):
667 """Print the docstring for an object.
667 """Print the docstring for an object.
668
668
669 If the given object is a class, it will print both the class and the
669 If the given object is a class, it will print both the class and the
670 constructor docstrings."""
670 constructor docstrings."""
671 self._inspect('pdoc',parameter_s, namespaces)
671 self._inspect('pdoc',parameter_s, namespaces)
672
672
673 def magic_psource(self, parameter_s='', namespaces=None):
673 def magic_psource(self, parameter_s='', namespaces=None):
674 """Print (or run through pager) the source code for an object."""
674 """Print (or run through pager) the source code for an object."""
675 self._inspect('psource',parameter_s, namespaces)
675 self._inspect('psource',parameter_s, namespaces)
676
676
677 def magic_pfile(self, parameter_s=''):
678 """Print (or run through pager) the file where an object is defined.
679
680 The file opens at the line where the object definition begins. IPython
681 will honor the environment variable PAGER if set, and otherwise will
682 do its best to print the file in a convenient form.
683
684 If the given argument is not an object currently defined, IPython will
685 try to interpret it as a filename (automatically adding a .py extension
686 if needed). You can thus use %pfile as a syntax highlighting code
687 viewer."""
688
689 # first interpret argument as an object name
690 out = self._inspect('pfile',parameter_s)
691 # if not, try the input as a filename
692 if out == 'not found':
693 try:
694 filename = get_py_filename(parameter_s)
695 except IOError,msg:
696 print msg
697 return
698 page(self.shell.inspector.format(file(filename).read()))
699
700 def _inspect(self,meth,oname,namespaces=None,**kw):
701 """Generic interface to the inspector system.
702
703 This function is meant to be called by pdef, pdoc & friends."""
704
705 #oname = oname.strip()
706 #print '1- oname: <%r>' % oname # dbg
707 try:
708 oname = oname.strip().encode('ascii')
709 #print '2- oname: <%r>' % oname # dbg
710 except UnicodeEncodeError:
711 print 'Python identifiers can only contain ascii characters.'
712 return 'not found'
713
714 info = Struct(self._ofind(oname, namespaces))
715
716 if info.found:
717 try:
718 IPython.generics.inspect_object(info.obj)
719 return
720 except IPython.ipapi.TryNext:
721 pass
722 # Get the docstring of the class property if it exists.
723 path = oname.split('.')
724 root = '.'.join(path[:-1])
725 if info.parent is not None:
726 try:
727 target = getattr(info.parent, '__class__')
728 # The object belongs to a class instance.
729 try:
730 target = getattr(target, path[-1])
731 # The class defines the object.
732 if isinstance(target, property):
733 oname = root + '.__class__.' + path[-1]
734 info = Struct(self._ofind(oname))
735 except AttributeError: pass
736 except AttributeError: pass
737
738 pmethod = getattr(self.shell.inspector,meth)
739 formatter = info.ismagic and self.format_screen or None
740 if meth == 'pdoc':
741 pmethod(info.obj,oname,formatter)
742 elif meth == 'pinfo':
743 pmethod(info.obj,oname,formatter,info,**kw)
744 else:
745 pmethod(info.obj,oname)
746 else:
747 print 'Object `%s` not found.' % oname
748 return 'not found' # so callers can take other action
749
750 def magic_psearch(self, parameter_s=''):
751 """Search for object in namespaces by wildcard.
752
753 %psearch [options] PATTERN [OBJECT TYPE]
754
755 Note: ? can be used as a synonym for %psearch, at the beginning or at
756 the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the
757 rest of the command line must be unchanged (options come first), so
758 for example the following forms are equivalent
759
760 %psearch -i a* function
761 -i a* function?
762 ?-i a* function
763
764 Arguments:
765
766 PATTERN
767
768 where PATTERN is a string containing * as a wildcard similar to its
769 use in a shell. The pattern is matched in all namespaces on the
770 search path. By default objects starting with a single _ are not
771 matched; many IPython generated objects have a single
772 underscore. The default is case insensitive matching. Matching is
773 also done on the attributes of objects and not only on the objects
774 in a module.
775
776 [OBJECT TYPE]
777
778 Is the name of a python type from the types module. The name is
779 given in lowercase without the ending type, ex. StringType is
780 written string. By adding a type here only objects matching the
781 given type are matched. Using all here makes the pattern match all
782 types (this is the default).
783
784 Options:
785
786 -a: makes the pattern match even objects whose names start with a
787 single underscore. These names are normally omitted from the
788 search.
789
790 -i/-c: make the pattern case insensitive/sensitive. If neither of
791 these options is given, the default is read from your ipythonrc
792 file. The option name which sets this value is
793 'wildcards_case_sensitive'. If this option is not specified in your
794 ipythonrc file, IPython's internal default is to do a case sensitive
795 search.
796
797 -e/-s NAMESPACE: exclude/search a given namespace. The pattern you
798 specify can be searched in any of the following namespaces:
799 'builtin', 'user', 'user_global','internal', 'alias', where
800 'builtin' and 'user' are the search defaults. Note that you should
801 not use quotes when specifying namespaces.
802
803 'Builtin' contains the python module builtin, 'user' contains all
804 user data, 'alias' only contains the shell aliases and no python
805 objects, 'internal' contains objects used by IPython. The
806 'user_global' namespace is only used by embedded IPython instances,
807 and it contains module-level globals. You can add namespaces to the
808 search with -s or exclude them with -e (these options can be given
809 more than once).
810
811 Examples:
812
813 %psearch a* -> objects beginning with an a
814 %psearch -e builtin a* -> objects NOT in the builtin space starting in a
815 %psearch a* function -> all functions beginning with an a
816 %psearch re.e* -> objects beginning with an e in module re
817 %psearch r*.e* -> objects that start with e in modules starting in r
818 %psearch r*.* string -> all strings in modules beginning with r
819
820 Case sensitive search:
821
822 %psearch -c a* list all objects beginning with lower case a
823
824 Show objects beginning with a single _:
825
826 %psearch -a _* list objects beginning with a single underscore"""
827 try:
828 parameter_s = parameter_s.encode('ascii')
829 except UnicodeEncodeError:
830 print 'Python identifiers can only contain ascii characters.'
831 return
832
833 # default namespaces to be searched
834 def_search = ['user','builtin']
835
836 # Process options/args
837 opts,args = self.parse_options(parameter_s,'cias:e:',list_all=True)
838 opt = opts.get
839 shell = self.shell
840 psearch = shell.inspector.psearch
841
842 # select case options
843 if opts.has_key('i'):
844 ignore_case = True
845 elif opts.has_key('c'):
846 ignore_case = False
847 else:
848 ignore_case = not shell.rc.wildcards_case_sensitive
849
850 # Build list of namespaces to search from user options
851 def_search.extend(opt('s',[]))
852 ns_exclude = opt('e',[])
853 ns_search = [nm for nm in def_search if nm not in ns_exclude]
854
855 # Call the actual search
856 try:
857 psearch(args,shell.ns_table,ns_search,
858 show_all=opt('a'),ignore_case=ignore_case)
859 except:
860 shell.showtraceback()
861
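# -- Editor's note (not part of the original file): a hedged sketch of how the
# %psearch options documented above combine in an interactive session:
#
#   %psearch a*                  # 'user' and 'builtin' objects starting with a
#   %psearch -s internal a*      # also search IPython's internal namespace
#   %psearch -e builtin -c A*    # exclude builtins, force case-sensitive match
#   %psearch r*.* string         # all strings in modules beginning with r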
862 def magic_who_ls(self, parameter_s=''):
863 """Return a sorted list of all interactive variables.
864
865 If arguments are given, only variables of types matching these
866 arguments are returned."""
867
868 user_ns = self.shell.user_ns
869 internal_ns = self.shell.internal_ns
870 user_config_ns = self.shell.user_config_ns
871 out = []
872 typelist = parameter_s.split()
873
874 for i in user_ns:
875 if not (i.startswith('_') or i.startswith('_i')) \
876 and not (i in internal_ns or i in user_config_ns):
877 if typelist:
878 if type(user_ns[i]).__name__ in typelist:
879 out.append(i)
880 else:
881 out.append(i)
882 out.sort()
883 return out
884
885 def magic_who(self, parameter_s=''):
886 """Print all interactive variables, with some minimal formatting.
887
888 If any arguments are given, only variables whose type matches one of
889 these are printed. For example:
890
891 %who function str
892
893 will only list functions and strings, excluding all other types of
894 variables. To find the proper type names, simply use type(var) at a
895 command line to see how python prints type names. For example:
896
897 In [1]: type('hello')\\
898 Out[1]: <type 'str'>
899
900 indicates that the type name for strings is 'str'.
901
902 %who always excludes executed names loaded through your configuration
903 file and things which are internal to IPython.
904
905 This is deliberate, as typically you may load many modules and the
906 purpose of %who is to show you only what you've manually defined."""
907
908 varlist = self.magic_who_ls(parameter_s)
909 if not varlist:
910 if parameter_s:
911 print 'No variables match your requested type.'
912 else:
913 print 'Interactive namespace is empty.'
914 return
915
916 # if we have variables, move on...
917 count = 0
918 for i in varlist:
919 print i+'\t',
920 count += 1
921 if count > 8:
922 count = 0
923 print
924 print
925
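# -- Editor's note (not part of the original file): %who_ls backs both %who and
# %whos and returns a plain sorted list, so it can also be used
# programmatically; a minimal sketch (the return-value plumbing through
# _ip.magic is an assumption, not shown in this file):
#
#   %who function str                # print only functions and strings
#   names = _ip.magic("who_ls")      # assumed: magic() passes the list back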
926 def magic_whos(self, parameter_s=''):
927 """Like %who, but gives some extra information about each variable.
928
929 The same type filtering of %who can be applied here.
930
931 For all variables, the type is printed. Additionally it prints:
932
933 - For {},[],(): their length.
934
935 - For numpy and Numeric arrays, a summary with shape, number of
936 elements, typecode and size in memory.
937
938 - Everything else: a string representation, snipping their middle if
939 too long."""
940
941 varnames = self.magic_who_ls(parameter_s)
942 if not varnames:
943 if parameter_s:
944 print 'No variables match your requested type.'
945 else:
946 print 'Interactive namespace is empty.'
947 return
948
949 # if we have variables, move on...
950
951 # for these types, show len() instead of data:
952 seq_types = [types.DictType,types.ListType,types.TupleType]
953
954 # for numpy/Numeric arrays, display summary info
955 try:
956 import numpy
957 except ImportError:
958 ndarray_type = None
959 else:
960 ndarray_type = numpy.ndarray.__name__
961 try:
962 import Numeric
963 except ImportError:
964 array_type = None
965 else:
966 array_type = Numeric.ArrayType.__name__
967
968 # Find all variable names and types so we can figure out column sizes
969 def get_vars(i):
970 return self.shell.user_ns[i]
971
972 # some types are well known and can be shorter
973 abbrevs = {'IPython.macro.Macro' : 'Macro'}
974 def type_name(v):
975 tn = type(v).__name__
976 return abbrevs.get(tn,tn)
977
978 varlist = map(get_vars,varnames)
979
980 typelist = []
981 for vv in varlist:
982 tt = type_name(vv)
983
984 if tt=='instance':
985 typelist.append( abbrevs.get(str(vv.__class__),
986 str(vv.__class__)))
987 else:
988 typelist.append(tt)
989
990 # column labels and # of spaces as separator
991 varlabel = 'Variable'
992 typelabel = 'Type'
993 datalabel = 'Data/Info'
994 colsep = 3
995 # variable format strings
996 vformat = "$vname.ljust(varwidth)$vtype.ljust(typewidth)"
997 vfmt_short = '$vstr[:25]<...>$vstr[-25:]'
998 aformat = "%s: %s elems, type `%s`, %s bytes"
999 # find the size of the columns to format the output nicely
1000 varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep
1001 typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep
1002 # table header
1003 print varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \
1004 ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1)
1005 # and the table itself
1006 kb = 1024
1007 Mb = 1048576 # kb**2
1008 for vname,var,vtype in zip(varnames,varlist,typelist):
1009 print itpl(vformat),
1010 if vtype in seq_types:
1011 print len(var)
1012 elif vtype in [array_type,ndarray_type]:
1013 vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1]
1014 if vtype==ndarray_type:
1015 # numpy
1016 vsize = var.size
1017 vbytes = vsize*var.itemsize
1018 vdtype = var.dtype
1019 else:
1020 # Numeric
1021 vsize = Numeric.size(var)
1022 vbytes = vsize*var.itemsize()
1023 vdtype = var.typecode()
1024
1025 if vbytes < 100000:
1026 print aformat % (vshape,vsize,vdtype,vbytes)
1027 else:
1028 print aformat % (vshape,vsize,vdtype,vbytes),
1029 if vbytes < Mb:
1030 print '(%s kb)' % (vbytes/kb,)
1031 else:
1032 print '(%s Mb)' % (vbytes/Mb,)
1033 else:
1034 try:
1035 vstr = str(var)
1036 except UnicodeEncodeError:
1037 vstr = unicode(var).encode(sys.getdefaultencoding(),
1038 'backslashreplace')
1039 vstr = vstr.replace('\n','\\n')
1040 if len(vstr) < 50:
1041 print vstr
1042 else:
1043 printpl(vfmt_short)
1044
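# -- Editor's note (not part of the original file): an illustrative sketch of
# the %whos table assembled above; column widths depend on the actual names,
# and the sample variables (a, d) are hypothetical:
#
#   Variable   Type       Data/Info
#   -------------------------------------
#   a          ndarray    1000x1000: 1000000 elems, type `float64`, 8000000 bytes (7 Mb)
#   d          dict       3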
1045 def magic_reset(self, parameter_s=''):
1046 """Resets the namespace by removing all names defined by the user.
1047
1048 Input/Output history are left around in case you need them."""
1049
1050 ans = self.shell.ask_yes_no(
1051 "Once deleted, variables cannot be recovered. Proceed (y/[n])? ")
1052 if not ans:
1053 print 'Nothing done.'
1054 return
1055 user_ns = self.shell.user_ns
1056 for i in self.magic_who_ls():
1057 del(user_ns[i])
1058
1059 # Also flush the private list of module references kept for script
1060 # execution protection
1061 self.shell._user_main_modules[:] = []
1062
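# -- Editor's note (not part of the original file): %reset asks for
# confirmation before wiping the user namespace; a minimal sketch:
#
#   In [1]: %reset
#   Once deleted, variables cannot be recovered. Proceed (y/[n])? y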
1063 def magic_logstart(self,parameter_s=''):
1064 """Start logging anywhere in a session.
1065
1066 %logstart [-o|-r|-t] [log_name [log_mode]]
1067
1068 If no name is given, it defaults to a file named 'ipython_log.py' in your
1069 current directory, in 'rotate' mode (see below).
1070
1071 '%logstart name' saves to file 'name' in 'backup' mode. It saves your
1072 history up to that point and then continues logging.
1073
1074 %logstart takes a second optional parameter: logging mode. This can be one
1075 of (note that the modes are given unquoted):\\
1076 append: well, that says it.\\
1077 backup: rename (if exists) to name~ and start name.\\
1078 global: single logfile in your home dir, appended to.\\
1079 over : overwrite existing log.\\
1080 rotate: create rotating logs name.1~, name.2~, etc.
1081
1082 Options:
1083
1084 -o: log also IPython's output. In this mode, all commands which
1085 generate an Out[NN] prompt are recorded to the logfile, right after
1086 their corresponding input line. The output lines are always
1087 prepended with a '#[Out]# ' marker, so that the log remains valid
1088 Python code.
1089
1090 Since this marker is always the same, filtering only the output from
1091 a log is very easy, using for example a simple awk call:
1092
1093 awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py
1094
1095 -r: log 'raw' input. Normally, IPython's logs contain the processed
1096 input, so that user lines are logged in their final form, converted
1097 into valid Python. For example, %Exit is logged as
1098 '_ip.magic("Exit")'. If the -r flag is given, all input is logged
1099 exactly as typed, with no transformations applied.
1100
1101 -t: put timestamps before each input line logged (these are put in
1102 comments)."""
1103
1104 opts,par = self.parse_options(parameter_s,'ort')
1105 log_output = 'o' in opts
1106 log_raw_input = 'r' in opts
1107 timestamp = 't' in opts
1108
1109 rc = self.shell.rc
1110 logger = self.shell.logger
1111
1112 # if no args are given, the defaults set in the logger constructor by
1113 # IPython remain valid
1114 if par:
1115 try:
1116 logfname,logmode = par.split()
1117 except:
1118 logfname = par
1119 logmode = 'backup'
1120 else:
1121 logfname = logger.logfname
1122 logmode = logger.logmode
1123 # put logfname into rc struct as if it had been called on the command
1124 # line, so it ends up saved in the log header. Save it in case we need
1125 # to restore it...
1126 old_logfile = rc.opts.get('logfile','')
1127 if logfname:
1128 logfname = os.path.expanduser(logfname)
1129 rc.opts.logfile = logfname
1130 loghead = self.shell.loghead_tpl % (rc.opts,rc.args)
1131 try:
1132 started = logger.logstart(logfname,loghead,logmode,
1133 log_output,timestamp,log_raw_input)
1134 except:
1135 rc.opts.logfile = old_logfile
1136 warn("Couldn't start log: %s" % sys.exc_info()[1])
1137 else:
1138 # log input history up to this point, optionally interleaving
1139 # output if requested
1140
1141 if timestamp:
1142 # disable timestamping for the previous history, since we've
1143 # lost those already (no time machine here).
1144 logger.timestamp = False
1145
1146 if log_raw_input:
1147 input_hist = self.shell.input_hist_raw
1148 else:
1149 input_hist = self.shell.input_hist
1150
1151 if log_output:
1152 log_write = logger.log_write
1153 output_hist = self.shell.output_hist
1154 for n in range(1,len(input_hist)-1):
1155 log_write(input_hist[n].rstrip())
1156 if n in output_hist:
1157 log_write(repr(output_hist[n]),'output')
1158 else:
1159 logger.log_write(input_hist[1:])
1160 if timestamp:
1161 # re-enable timestamping
1162 logger.timestamp = True
1163
1164 print ('Activating auto-logging. '
1165 'Current session state plus future input saved.')
1166 logger.logstate()
1167
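# -- Editor's note (not part of the original file): a hedged sketch of the
# %logstart forms documented above (mysession.py is a hypothetical name):
#
#   %logstart                              # ipython_log.py in the cwd, 'rotate' mode
#   %logstart mysession.py                 # explicit name, 'backup' mode
#   %logstart -o -t mysession.py append    # log Out[NN] values too, with timestamps
#
# Output lines carry the '#[Out]# ' marker, so they can be filtered later with
# the awk one-liner quoted in the docstring.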
1168 def magic_logstop(self,parameter_s=''):
1169 """Fully stop logging and close log file.
1170
1171 In order to start logging again, a new %logstart call needs to be made,
1172 possibly (though not necessarily) with a new filename, mode and other
1173 options."""
1174 self.logger.logstop()
1175
1176 def magic_logoff(self,parameter_s=''):
1177 """Temporarily stop logging.
1178
1179 You must have previously started logging."""
1180 self.shell.logger.switch_log(0)
1181
1182 def magic_logon(self,parameter_s=''):
1183 """Restart logging.
1184
1185 This function is for restarting logging which you've temporarily
1186 stopped with %logoff. For starting logging for the first time, you
1187 must use the %logstart function, which allows you to specify an
1188 optional log filename."""
1189
1190 self.shell.logger.switch_log(1)
1191
1192 def magic_logstate(self,parameter_s=''):
1193 """Print the status of the logging system."""
1194
1195 self.shell.logger.logstate()
1196
1197 def magic_pdb(self, parameter_s=''):
1198 """Control the automatic calling of the pdb interactive debugger.
1199
1200 Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without
1201 argument it works as a toggle.
1202
1203 When an exception is triggered, IPython can optionally call the
1204 interactive pdb debugger after the traceback printout. %pdb toggles
1205 this feature on and off.
1206
1207 The initial state of this feature is set in your ipythonrc
1208 configuration file (the variable is called 'pdb').
1209
1210 If you want to just activate the debugger AFTER an exception has fired,
1211 without having to type '%pdb on' and rerunning your code, you can use
1212 the %debug magic."""
1213
1214 par = parameter_s.strip().lower()
1215
1216 if par:
1217 try:
1218 new_pdb = {'off':0,'0':0,'on':1,'1':1}[par]
1219 except KeyError:
1220 print ('Incorrect argument. Use on/1, off/0, '
1221 'or nothing for a toggle.')
1222 return
1223 else:
1224 # toggle
1225 new_pdb = not self.shell.call_pdb
1226
1227 # set on the shell
1228 self.shell.call_pdb = new_pdb
1229 print 'Automatic pdb calling has been turned',on_off(new_pdb)
1230
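# -- Editor's note (not part of the original file): the two debugger entry
# points described above, as a minimal sketch:
#
#   %pdb on      # drop into pdb automatically whenever an exception fires
#   %pdb         # with no argument: toggle the current setting
#   %debug       # post-mortem: inspect the most recent traceback after the fact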
1231 def magic_debug(self, parameter_s=''):
1232 """Activate the interactive debugger in post-mortem mode.
1233
1234 If an exception has just occurred, this lets you inspect its stack
1235 frames interactively. Note that this will always work only on the last
1236 traceback that occurred, so you must call this quickly after an
1237 exception that you wish to inspect has fired, because if another one
1238 occurs, it clobbers the previous one.
1239
1240 If you want IPython to automatically do this on every exception, see
1241 the %pdb magic for more details.
1242 """
1243
1244 self.shell.debugger(force=True)
1245
1246 def magic_prun(self, parameter_s ='',user_mode=1,
1247 opts=None,arg_lst=None,prog_ns=None):
1248
1249 """Run a statement through the python code profiler.
1250
1251 Usage:\\
1252 %prun [options] statement
1253
1254 The given statement (which doesn't require quote marks) is run via the
1255 python profiler in a manner similar to the profile.run() function.
1256 Namespaces are internally managed to work correctly; profile.run
1257 cannot be used in IPython because it makes certain assumptions about
1258 namespaces which do not hold under IPython.
1259
1260 Options:
1261
1262 -l <limit>: you can place restrictions on what or how much of the
1263 profile gets printed. The limit value can be:
1264
1265 * A string: only information for function names containing this string
1266 is printed.
1267
1268 * An integer: only this many lines are printed.
1269
1270 * A float (between 0 and 1): this fraction of the report is printed
1271 (for example, use a limit of 0.4 to see the topmost 40% only).
1272
1273 You can combine several limits with repeated use of the option. For
1274 example, '-l __init__ -l 5' will print only the topmost 5 lines of
1275 information about class constructors.
1276
1277 -r: return the pstats.Stats object generated by the profiling. This
1278 object has all the information about the profile in it, and you can
1279 later use it for further analysis or in other functions.
1280
1281 -s <key>: sort profile by given key. You can provide more than one key
1282 by using the option several times: '-s key1 -s key2 -s key3...'. The
1283 default sorting key is 'time'.
1284
1285 The following is copied verbatim from the profile documentation
1286 referenced below:
1287
1288 When more than one key is provided, additional keys are used as
1289 secondary criteria when there is equality in all keys selected
1290 before them.
1291
1292 Abbreviations can be used for any key names, as long as the
1293 abbreviation is unambiguous. The following are the keys currently
1294 defined:
1295
1296 Valid Arg Meaning\\
1297 "calls" call count\\
1298 "cumulative" cumulative time\\
1299 "file" file name\\
1300 "module" file name\\
1301 "pcalls" primitive call count\\
1302 "line" line number\\
1303 "name" function name\\
1304 "nfl" name/file/line\\
1305 "stdname" standard name\\
1306 "time" internal time
1307
1308 Note that all sorts on statistics are in descending order (placing
1309 most time consuming items first), whereas name, file, and line number
1310 searches are in ascending order (i.e., alphabetical). The subtle
1311 distinction between "nfl" and "stdname" is that the standard name is a
1312 sort of the name as printed, which means that the embedded line
1313 numbers get compared in an odd way. For example, lines 3, 20, and 40
1314 would (if the file names were the same) appear in the string order
1315 "20" "3" and "40". In contrast, "nfl" does a numeric compare of the
1316 line numbers. In fact, sort_stats("nfl") is the same as
1317 sort_stats("name", "file", "line").
1318
1319 -T <filename>: save profile results as shown on screen to a text
1320 file. The profile is still shown on screen.
1321
1322 -D <filename>: save (via dump_stats) profile statistics to given
1323 filename. This data is in a format understood by the pstats module, and
1324 is generated by a call to the dump_stats() method of profile
1325 objects. The profile is still shown on screen.
1326
1327 If you want to run complete programs under the profiler's control, use
1328 '%run -p [prof_opts] filename.py [args to program]' where prof_opts
1329 contains profiler specific options as described here.
1330
1331 You can read the complete documentation for the profile module with:\\
1332 In [1]: import profile; profile.help() """
1333
1334 opts_def = Struct(D=[''],l=[],s=['time'],T=[''])
1335 # protect user quote marks
1336 parameter_s = parameter_s.replace('"',r'\"').replace("'",r"\'")
1337
1338 if user_mode: # regular user call
1339 opts,arg_str = self.parse_options(parameter_s,'D:l:rs:T:',
1340 list_all=1)
1341 namespace = self.shell.user_ns
1342 else: # called to run a program by %run -p
1343 try:
1344 filename = get_py_filename(arg_lst[0])
1345 except IOError,msg:
1346 error(msg)
1347 return
1348
1349 arg_str = 'execfile(filename,prog_ns)'
1350 namespace = locals()
1351
1352 opts.merge(opts_def)
1353
1354 prof = profile.Profile()
1355 try:
1356 prof = prof.runctx(arg_str,namespace,namespace)
1357 sys_exit = ''
1358 except SystemExit:
1359 sys_exit = """*** SystemExit exception caught in code being profiled."""
1360
1361 stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s)
1362
1363 lims = opts.l
1364 if lims:
1365 lims = [] # rebuild lims with ints/floats/strings
1366 for lim in opts.l:
1367 try:
1368 lims.append(int(lim))
1369 except ValueError:
1370 try:
1371 lims.append(float(lim))
1372 except ValueError:
1373 lims.append(lim)
1374
1375 # Trap output.
1376 stdout_trap = StringIO()
1377
1378 if hasattr(stats,'stream'):
1379 # In newer versions of python, the stats object has a 'stream'
1380 # attribute to write into.
1381 stats.stream = stdout_trap
1382 stats.print_stats(*lims)
1383 else:
1384 # For older versions, we manually redirect stdout during printing
1385 sys_stdout = sys.stdout
1386 try:
1387 sys.stdout = stdout_trap
1388 stats.print_stats(*lims)
1389 finally:
1390 sys.stdout = sys_stdout
1391
1392 output = stdout_trap.getvalue()
1393 output = output.rstrip()
1394
1395 page(output,screen_lines=self.shell.rc.screen_length)
1396 print sys_exit,
1397
1398 dump_file = opts.D[0]
1399 text_file = opts.T[0]
1400 if dump_file:
1401 prof.dump_stats(dump_file)
1402 print '\n*** Profile stats marshalled to file',\
1403 `dump_file`+'.',sys_exit
1404 if text_file:
1405 pfile = file(text_file,'w')
1406 pfile.write(output)
1407 pfile.close()
1408 print '\n*** Profile printout saved to text file',\
1409 `text_file`+'.',sys_exit
1410
1411 if opts.has_key('r'):
1412 return stats
1413 else:
1414 return None
1415
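# -- Editor's note (not part of the original file): a hedged sketch combining
# the %prun options documented above (my_function and x are hypothetical):
#
#   %prun my_function(x)                        # profile one call, sorted by 'time'
#   %prun -s cumulative -l 10 my_function(x)    # cumulative sort, top 10 lines
#   %prun -r my_function(x)                     # return the pstats.Stats object
#   %prun -D out.prof -T out.txt my_function(x) # also dump stats and a text report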
1416 def magic_run(self, parameter_s ='',runner=None):
1416 def magic_run(self, parameter_s ='',runner=None):
1417 """Run the named file inside IPython as a program.
1417 """Run the named file inside IPython as a program.
1418
1418
1419 Usage:\\
1419 Usage:\\
1420 %run [-n -i -t [-N<N>] -d [-b<N>] -p [profile options]] file [args]
1420 %run [-n -i -t [-N<N>] -d [-b<N>] -p [profile options]] file [args]
1421
1421
1422 Parameters after the filename are passed as command-line arguments to
1422 Parameters after the filename are passed as command-line arguments to
1423 the program (put in sys.argv). Then, control returns to IPython's
1423 the program (put in sys.argv). Then, control returns to IPython's
1424 prompt.
1424 prompt.
1425
1425
1426 This is similar to running at a system prompt:\\
1426 This is similar to running at a system prompt:\\
1427 $ python file args\\
1427 $ python file args\\
1428 but with the advantage of giving you IPython's tracebacks, and of
1428 but with the advantage of giving you IPython's tracebacks, and of
1429 loading all variables into your interactive namespace for further use
1429 loading all variables into your interactive namespace for further use
1430 (unless -p is used, see below).
1430 (unless -p is used, see below).
1431
1431
1432 The file is executed in a namespace initially consisting only of
1432 The file is executed in a namespace initially consisting only of
1433 __name__=='__main__' and sys.argv constructed as indicated. It thus
1433 __name__=='__main__' and sys.argv constructed as indicated. It thus
1434 sees its environment as if it were being run as a stand-alone program
1434 sees its environment as if it were being run as a stand-alone program
1435 (except for sharing global objects such as previously imported
1435 (except for sharing global objects such as previously imported
1436 modules). But after execution, the IPython interactive namespace gets
1436 modules). But after execution, the IPython interactive namespace gets
1437 updated with all variables defined in the program (except for __name__
1437 updated with all variables defined in the program (except for __name__
1438 and sys.argv). This allows for very convenient loading of code for
1438 and sys.argv). This allows for very convenient loading of code for
1439 interactive work, while giving each program a 'clean sheet' to run in.
1439 interactive work, while giving each program a 'clean sheet' to run in.
1440
1440
1441 Options:
1441 Options:
1442
1442
1443 -n: __name__ is NOT set to '__main__', but to the running file's name
1443 -n: __name__ is NOT set to '__main__', but to the running file's name
1444 without extension (as python does under import). This allows running
1444 without extension (as python does under import). This allows running
1445 scripts and reloading the definitions in them without calling code
1445 scripts and reloading the definitions in them without calling code
1446 protected by an ' if __name__ == "__main__" ' clause.
1446 protected by an ' if __name__ == "__main__" ' clause.
1447
1447
1448 -i: run the file in IPython's namespace instead of an empty one. This
1448 -i: run the file in IPython's namespace instead of an empty one. This
1449 is useful if you are experimenting with code written in a text editor
1449 is useful if you are experimenting with code written in a text editor
1450 which depends on variables defined interactively.
1450 which depends on variables defined interactively.
1451
1451
1452 -e: ignore sys.exit() calls or SystemExit exceptions in the script
1452 -e: ignore sys.exit() calls or SystemExit exceptions in the script
1453 being run. This is particularly useful if IPython is being used to
1453 being run. This is particularly useful if IPython is being used to
1454 run unittests, which always exit with a sys.exit() call. In such
1454 run unittests, which always exit with a sys.exit() call. In such
1455 cases you are interested in the output of the test results, not in
1455 cases you are interested in the output of the test results, not in
1456 seeing a traceback of the unittest module.
1456 seeing a traceback of the unittest module.
1457
1457
1458 -t: print timing information at the end of the run. IPython will give
1458 -t: print timing information at the end of the run. IPython will give
1459 you an estimated CPU time consumption for your script, which under
1459 you an estimated CPU time consumption for your script, which under
1460 Unix uses the resource module to avoid the wraparound problems of
1460 Unix uses the resource module to avoid the wraparound problems of
1461 time.clock(). Under Unix, an estimate of time spent on system tasks
1461 time.clock(). Under Unix, an estimate of time spent on system tasks
1462 is also given (for Windows platforms this is reported as 0.0).
1462 is also given (for Windows platforms this is reported as 0.0).
1463
1463
1464 If -t is given, an additional -N<N> option can be given, where <N>
1464 If -t is given, an additional -N<N> option can be given, where <N>
1465 must be an integer indicating how many times you want the script to
1465 must be an integer indicating how many times you want the script to
1466 run. The final timing report will include total and per run results.
1466 run. The final timing report will include total and per run results.
1467
1467
1468 For example (testing the script uniq_stable.py):
1468 For example (testing the script uniq_stable.py):
1469
1469
1470 In [1]: run -t uniq_stable
1470 In [1]: run -t uniq_stable
1471
1471
1472 IPython CPU timings (estimated):\\
1472 IPython CPU timings (estimated):\\
1473 User : 0.19597 s.\\
1473 User : 0.19597 s.\\
1474 System: 0.0 s.\\
1474 System: 0.0 s.\\
1475
1475
1476 In [2]: run -t -N5 uniq_stable
1476 In [2]: run -t -N5 uniq_stable
1477
1477
1478 IPython CPU timings (estimated):\\
1478 IPython CPU timings (estimated):\\
1479 Total runs performed: 5\\
1479 Total runs performed: 5\\
1480 Times : Total Per run\\
1480 Times : Total Per run\\
1481 User : 0.910862 s, 0.1821724 s.\\
1481 User : 0.910862 s, 0.1821724 s.\\
1482 System: 0.0 s, 0.0 s.
1482 System: 0.0 s, 0.0 s.
1483
1483
1484 -d: run your program under the control of pdb, the Python debugger.
1484 -d: run your program under the control of pdb, the Python debugger.
1485 This allows you to execute your program step by step, watch variables,
1485 This allows you to execute your program step by step, watch variables,
1486 etc. Internally, what IPython does is similar to calling:
1486 etc. Internally, what IPython does is similar to calling:
1487
1487
1488 pdb.run('execfile("YOURFILENAME")')
1488 pdb.run('execfile("YOURFILENAME")')
1489
1489
1490 with a breakpoint set on line 1 of your file. You can change the line
1490 with a breakpoint set on line 1 of your file. You can change the line
1491 number for this automatic breakpoint to be <N> by using the -bN option
1491 number for this automatic breakpoint to be <N> by using the -bN option
1492 (where N must be an integer). For example:
1492 (where N must be an integer). For example:
1493
1493
1494 %run -d -b40 myscript
1494 %run -d -b40 myscript
1495
1495
1496 will set the first breakpoint at line 40 in myscript.py. Note that
1496 will set the first breakpoint at line 40 in myscript.py. Note that
1497 the first breakpoint must be set on a line which actually does
1497 the first breakpoint must be set on a line which actually does
1498 something (not a comment or docstring) for it to stop execution.
1498 something (not a comment or docstring) for it to stop execution.
1499
1499
1500 When the pdb debugger starts, you will see a (Pdb) prompt. You must
1500 When the pdb debugger starts, you will see a (Pdb) prompt. You must
1501 first enter 'c' (without quotes) to start execution up to the first
1502 breakpoint.
1502 breakpoint.
1503
1503
1504 Entering 'help' gives information about the use of the debugger. You
1504 Entering 'help' gives information about the use of the debugger. You
1505 can easily see pdb's full documentation with "import pdb;pdb.help()"
1505 can easily see pdb's full documentation with "import pdb;pdb.help()"
1506 at a prompt.
1506 at a prompt.
1507
1507
1508 -p: run program under the control of the Python profiler module (which
1508 -p: run program under the control of the Python profiler module (which
1509 prints a detailed report of execution times, function calls, etc).
1509 prints a detailed report of execution times, function calls, etc).
1510
1510
1511 You can pass other options after -p which affect the behavior of the
1511 You can pass other options after -p which affect the behavior of the
1512 profiler itself. See the docs for %prun for details.
1512 profiler itself. See the docs for %prun for details.
1513
1513
1514 In this mode, the program's variables do NOT propagate back to the
1514 In this mode, the program's variables do NOT propagate back to the
1515 IPython interactive namespace (because they remain in the namespace
1515 IPython interactive namespace (because they remain in the namespace
1516 where the profiler executes them).
1516 where the profiler executes them).
1517
1517
1518 Internally this triggers a call to %prun, see its documentation for
1518 Internally this triggers a call to %prun, see its documentation for
1519 details on the options available specifically for profiling.
1519 details on the options available specifically for profiling.
1520
1520
1521 There is one special usage for which the text above doesn't apply:
1521 There is one special usage for which the text above doesn't apply:
1522 if the filename ends with .ipy, the file is run as an IPython script,
1523 just as if the commands were typed at the IPython prompt.
1524 """
1524 """
1525
1525
1526 # get arguments and set sys.argv for program to be run.
1526 # get arguments and set sys.argv for program to be run.
1527 opts,arg_lst = self.parse_options(parameter_s,'nidtN:b:pD:l:rs:T:e',
1527 opts,arg_lst = self.parse_options(parameter_s,'nidtN:b:pD:l:rs:T:e',
1528 mode='list',list_all=1)
1528 mode='list',list_all=1)
1529
1529
1530 try:
1530 try:
1531 filename = get_py_filename(arg_lst[0])
1531 filename = get_py_filename(arg_lst[0])
1532 except IndexError:
1532 except IndexError:
1533 warn('you must provide at least a filename.')
1533 warn('you must provide at least a filename.')
1534 print '\n%run:\n',OInspect.getdoc(self.magic_run)
1534 print '\n%run:\n',OInspect.getdoc(self.magic_run)
1535 return
1535 return
1536 except IOError,msg:
1536 except IOError,msg:
1537 error(msg)
1537 error(msg)
1538 return
1538 return
1539
1539
1540 if filename.lower().endswith('.ipy'):
1540 if filename.lower().endswith('.ipy'):
1541 self.api.runlines(open(filename).read())
1541 self.api.runlines(open(filename).read())
1542 return
1542 return
1543
1543
1544 # Control the response to exit() calls made by the script being run
1544 # Control the response to exit() calls made by the script being run
1545 exit_ignore = opts.has_key('e')
1545 exit_ignore = opts.has_key('e')
1546
1546
1547 # Make sure that the running script gets a proper sys.argv as if it
1547 # Make sure that the running script gets a proper sys.argv as if it
1548 # were run from a system shell.
1548 # were run from a system shell.
1549 save_argv = sys.argv # save it for later restoring
1549 save_argv = sys.argv # save it for later restoring
1550 sys.argv = [filename]+ arg_lst[1:] # put in the proper filename
1550 sys.argv = [filename]+ arg_lst[1:] # put in the proper filename
1551
1551
1552 if opts.has_key('i'):
1552 if opts.has_key('i'):
1553 # Run in user's interactive namespace
1553 # Run in user's interactive namespace
1554 prog_ns = self.shell.user_ns
1554 prog_ns = self.shell.user_ns
1555 __name__save = self.shell.user_ns['__name__']
1555 __name__save = self.shell.user_ns['__name__']
1556 prog_ns['__name__'] = '__main__'
1556 prog_ns['__name__'] = '__main__'
1557 main_mod = FakeModule(prog_ns)
1557 main_mod = FakeModule(prog_ns)
1558 else:
1558 else:
1559 # Run in a fresh, empty namespace
1559 # Run in a fresh, empty namespace
1560 if opts.has_key('n'):
1560 if opts.has_key('n'):
1561 name = os.path.splitext(os.path.basename(filename))[0]
1561 name = os.path.splitext(os.path.basename(filename))[0]
1562 else:
1562 else:
1563 name = '__main__'
1563 name = '__main__'
1564 main_mod = FakeModule()
1564 main_mod = FakeModule()
1565 prog_ns = main_mod.__dict__
1565 prog_ns = main_mod.__dict__
1566 prog_ns['__name__'] = name
1566 prog_ns['__name__'] = name
1567 # The shell MUST hold a reference to main_mod so after %run exits,
1567 # The shell MUST hold a reference to main_mod so after %run exits,
1568 # the python deletion mechanism doesn't zero it out (leaving
1568 # the python deletion mechanism doesn't zero it out (leaving
1569 # dangling references)
1569 # dangling references)
1570 self.shell._user_main_modules.append(main_mod)
1570 self.shell._user_main_modules.append(main_mod)
1571
1571
1572 # Since '%run foo' emulates 'python foo.py' at the cmd line, we must
1572 # Since '%run foo' emulates 'python foo.py' at the cmd line, we must
1573 # set the __file__ global in the script's namespace
1573 # set the __file__ global in the script's namespace
1574 prog_ns['__file__'] = filename
1574 prog_ns['__file__'] = filename
1575
1575
1576 # pickle fix. See iplib for an explanation. But we need to make sure
1576 # pickle fix. See iplib for an explanation. But we need to make sure
1577 # that, if we overwrite __main__, we replace it at the end
1577 # that, if we overwrite __main__, we replace it at the end
1578 if prog_ns['__name__'] == '__main__':
1578 if prog_ns['__name__'] == '__main__':
1579 restore_main = sys.modules['__main__']
1579 restore_main = sys.modules['__main__']
1580 else:
1580 else:
1581 restore_main = False
1581 restore_main = False
1582
1582
1583 sys.modules[prog_ns['__name__']] = main_mod
1583 sys.modules[prog_ns['__name__']] = main_mod
1584
1584
1585 stats = None
1585 stats = None
1586 try:
1586 try:
1587 self.shell.savehist()
1587 self.shell.savehist()
1588
1588
1589 if opts.has_key('p'):
1589 if opts.has_key('p'):
1590 stats = self.magic_prun('',0,opts,arg_lst,prog_ns)
1590 stats = self.magic_prun('',0,opts,arg_lst,prog_ns)
1591 else:
1591 else:
1592 if opts.has_key('d'):
1592 if opts.has_key('d'):
1593 deb = Debugger.Pdb(self.shell.rc.colors)
1593 deb = Debugger.Pdb(self.shell.rc.colors)
1594 # reset Breakpoint state, which is moronically kept
1594 # reset Breakpoint state, which is moronically kept
1595 # in a class
1595 # in a class
1596 bdb.Breakpoint.next = 1
1596 bdb.Breakpoint.next = 1
1597 bdb.Breakpoint.bplist = {}
1597 bdb.Breakpoint.bplist = {}
1598 bdb.Breakpoint.bpbynumber = [None]
1598 bdb.Breakpoint.bpbynumber = [None]
1599 # Set an initial breakpoint to stop execution
1599 # Set an initial breakpoint to stop execution
1600 maxtries = 10
1600 maxtries = 10
1601 bp = int(opts.get('b',[1])[0])
1601 bp = int(opts.get('b',[1])[0])
1602 checkline = deb.checkline(filename,bp)
1602 checkline = deb.checkline(filename,bp)
1603 if not checkline:
1603 if not checkline:
1604 for bp in range(bp+1,bp+maxtries+1):
1604 for bp in range(bp+1,bp+maxtries+1):
1605 if deb.checkline(filename,bp):
1605 if deb.checkline(filename,bp):
1606 break
1606 break
1607 else:
1607 else:
1608 msg = ("\nI failed to find a valid line to set "
1608 msg = ("\nI failed to find a valid line to set "
1609 "a breakpoint\n"
1609 "a breakpoint\n"
1610 "after trying up to line: %s.\n"
1610 "after trying up to line: %s.\n"
1611 "Please set a valid breakpoint manually "
1611 "Please set a valid breakpoint manually "
1612 "with the -b option." % bp)
1612 "with the -b option." % bp)
1613 error(msg)
1613 error(msg)
1614 return
1614 return
1615 # if we find a good linenumber, set the breakpoint
1615 # if we find a good linenumber, set the breakpoint
1616 deb.do_break('%s:%s' % (filename,bp))
1616 deb.do_break('%s:%s' % (filename,bp))
1617 # Start file run
1617 # Start file run
1618 print "NOTE: Enter 'c' at the",
1618 print "NOTE: Enter 'c' at the",
1619 print "%s prompt to start your script." % deb.prompt
1619 print "%s prompt to start your script." % deb.prompt
1620 try:
1620 try:
1621 deb.run('execfile("%s")' % filename,prog_ns)
1621 deb.run('execfile("%s")' % filename,prog_ns)
1622
1622
1623 except:
1623 except:
1624 etype, value, tb = sys.exc_info()
1624 etype, value, tb = sys.exc_info()
1625 # Skip three frames in the traceback: the %run one,
1625 # Skip three frames in the traceback: the %run one,
1626 # one inside bdb.py, and the command-line typed by the
1626 # one inside bdb.py, and the command-line typed by the
1627 # user (run by exec in pdb itself).
1627 # user (run by exec in pdb itself).
1628 self.shell.InteractiveTB(etype,value,tb,tb_offset=3)
1628 self.shell.InteractiveTB(etype,value,tb,tb_offset=3)
1629 else:
1629 else:
1630 if runner is None:
1630 if runner is None:
1631 runner = self.shell.safe_execfile
1631 runner = self.shell.safe_execfile
1632 if opts.has_key('t'):
1632 if opts.has_key('t'):
1633 # timed execution
1633 # timed execution
1634 try:
1634 try:
1635 nruns = int(opts['N'][0])
1635 nruns = int(opts['N'][0])
1636 if nruns < 1:
1636 if nruns < 1:
1637 error('Number of runs must be >=1')
1637 error('Number of runs must be >=1')
1638 return
1638 return
1639 except (KeyError):
1639 except (KeyError):
1640 nruns = 1
1640 nruns = 1
1641 if nruns == 1:
1641 if nruns == 1:
1642 t0 = clock2()
1642 t0 = clock2()
1643 runner(filename,prog_ns,prog_ns,
1643 runner(filename,prog_ns,prog_ns,
1644 exit_ignore=exit_ignore)
1644 exit_ignore=exit_ignore)
1645 t1 = clock2()
1645 t1 = clock2()
1646 t_usr = t1[0]-t0[0]
1646 t_usr = t1[0]-t0[0]
1647 t_sys = t1[1]-t0[1]
1648 print "\nIPython CPU timings (estimated):"
1648 print "\nIPython CPU timings (estimated):"
1649 print " User : %10s s." % t_usr
1649 print " User : %10s s." % t_usr
1650 print " System: %10s s." % t_sys
1650 print " System: %10s s." % t_sys
1651 else:
1651 else:
1652 runs = range(nruns)
1652 runs = range(nruns)
1653 t0 = clock2()
1653 t0 = clock2()
1654 for nr in runs:
1654 for nr in runs:
1655 runner(filename,prog_ns,prog_ns,
1655 runner(filename,prog_ns,prog_ns,
1656 exit_ignore=exit_ignore)
1656 exit_ignore=exit_ignore)
1657 t1 = clock2()
1657 t1 = clock2()
1658 t_usr = t1[0]-t0[0]
1658 t_usr = t1[0]-t0[0]
1659 t_sys = t1[1]-t0[1]
1660 print "\nIPython CPU timings (estimated):"
1660 print "\nIPython CPU timings (estimated):"
1661 print "Total runs performed:",nruns
1661 print "Total runs performed:",nruns
1662 print " Times : %10s %10s" % ('Total','Per run')
1662 print " Times : %10s %10s" % ('Total','Per run')
1663 print " User : %10s s, %10s s." % (t_usr,t_usr/nruns)
1663 print " User : %10s s, %10s s." % (t_usr,t_usr/nruns)
1664 print " System: %10s s, %10s s." % (t_sys,t_sys/nruns)
1664 print " System: %10s s, %10s s." % (t_sys,t_sys/nruns)
1665
1665
1666 else:
1666 else:
1667 # regular execution
1667 # regular execution
1668 runner(filename,prog_ns,prog_ns,exit_ignore=exit_ignore)
1668 runner(filename,prog_ns,prog_ns,exit_ignore=exit_ignore)
1669 if opts.has_key('i'):
1669 if opts.has_key('i'):
1670 self.shell.user_ns['__name__'] = __name__save
1670 self.shell.user_ns['__name__'] = __name__save
1671 else:
1671 else:
1672 # update IPython interactive namespace
1672 # update IPython interactive namespace
1673 del prog_ns['__name__']
1673 del prog_ns['__name__']
1674 self.shell.user_ns.update(prog_ns)
1674 self.shell.user_ns.update(prog_ns)
1675 finally:
1675 finally:
1676 sys.argv = save_argv
1676 sys.argv = save_argv
1677 if restore_main:
1677 if restore_main:
1678 sys.modules['__main__'] = restore_main
1678 sys.modules['__main__'] = restore_main
1679 self.shell.reloadhist()
1679 self.shell.reloadhist()
1680
1680
1681 return stats
1681 return stats
1682
1682
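# Illustrative sketch (not from the original file): the -t/-N timing branch in
# magic_run above relies on clock2() returning a (user, system) CPU-time pair
# and on a `runner` callable that executes the script. A minimal standalone
# version of that pattern, assuming a Unix-like platform where the standard
# `resource` module is available, could look like this; `run_script` is a
# hypothetical stand-in for the real runner.

import resource

def clock2_sketch():
    """Return (user, system) CPU times for the current process."""
    usage = resource.getrusage(resource.RUSAGE_SELF)
    return usage.ru_utime, usage.ru_stime

def timed_runs(run_script, nruns=1):
    """Run `run_script` nruns times and report total and per-run CPU times."""
    t0 = clock2_sketch()
    for _ in range(nruns):
        run_script()
    t1 = clock2_sketch()
    t_usr = t1[0] - t0[0]
    t_sys = t1[1] - t0[1]
    print("Total runs performed:", nruns)
    print(" User : %10s s, %10s s." % (t_usr, t_usr / nruns))
    print(" System: %10s s, %10s s." % (t_sys, t_sys / nruns))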
1683 def magic_runlog(self, parameter_s =''):
1683 def magic_runlog(self, parameter_s =''):
1684 """Run files as logs.
1684 """Run files as logs.
1685
1685
1686 Usage:\\
1686 Usage:\\
1687 %runlog file1 file2 ...
1687 %runlog file1 file2 ...
1688
1688
1689 Run the named files (treating them as log files) in sequence inside
1689 Run the named files (treating them as log files) in sequence inside
1690 the interpreter, and return to the prompt. This is much slower than
1690 the interpreter, and return to the prompt. This is much slower than
1691 %run because each line is executed in a try/except block, but it
1691 %run because each line is executed in a try/except block, but it
1692 allows running files with syntax errors in them.
1692 allows running files with syntax errors in them.
1693
1693
1694 Normally IPython will guess when a file is one of its own logfiles, so
1694 Normally IPython will guess when a file is one of its own logfiles, so
1695 you can typically use %run even for logs. This shorthand allows you to
1695 you can typically use %run even for logs. This shorthand allows you to
1696 force any file to be treated as a log file."""
1696 force any file to be treated as a log file."""
1697
1697
1698 for f in parameter_s.split():
1698 for f in parameter_s.split():
1699 self.shell.safe_execfile(f,self.shell.user_ns,
1699 self.shell.safe_execfile(f,self.shell.user_ns,
1700 self.shell.user_ns,islog=1)
1700 self.shell.user_ns,islog=1)
1701
1701
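# Illustrative sketch (not from the original file): %runlog above delegates to
# safe_execfile(..., islog=1), which the docstring describes as executing each
# line inside its own try/except block so a file containing errors can still
# be replayed. A simplified, hypothetical version of that idea, ignoring
# IPython's log-format handling and multi-line statements, might be:

def replay_log(path, namespace):
    """Execute a file line by line, skipping lines that raise, and return
    the list of (line_number, exception) pairs that failed."""
    failures = []
    with open(path) as log:
        for lineno, line in enumerate(log, 1):
            try:
                exec(line, namespace)          # each line runs independently
            except Exception as err:           # keep going on any error
                failures.append((lineno, err))
    return failures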
1702 def magic_timeit(self, parameter_s =''):
1702 def magic_timeit(self, parameter_s =''):
1703 """Time execution of a Python statement or expression
1703 """Time execution of a Python statement or expression
1704
1704
1705 Usage:\\
1705 Usage:\\
1706 %timeit [-n<N> -r<R> [-t|-c]] statement
1706 %timeit [-n<N> -r<R> [-t|-c]] statement
1707
1707
1708 Time execution of a Python statement or expression using the timeit
1708 Time execution of a Python statement or expression using the timeit
1709 module.
1709 module.
1710
1710
1711 Options:
1711 Options:
1712 -n<N>: execute the given statement <N> times in a loop. If this value
1712 -n<N>: execute the given statement <N> times in a loop. If this value
1713 is not given, a fitting value is chosen.
1713 is not given, a fitting value is chosen.
1714
1714
1715 -r<R>: repeat the loop iteration <R> times and take the best result.
1715 -r<R>: repeat the loop iteration <R> times and take the best result.
1716 Default: 3
1716 Default: 3
1717
1717
1718 -t: use time.time to measure the time, which is the default on Unix.
1718 -t: use time.time to measure the time, which is the default on Unix.
1719 This function measures wall time.
1719 This function measures wall time.
1720
1720
1721 -c: use time.clock to measure the time, which is the default on
1721 -c: use time.clock to measure the time, which is the default on
1722 Windows and measures wall time. On Unix, resource.getrusage is used
1722 Windows and measures wall time. On Unix, resource.getrusage is used
1723 instead and returns the CPU user time.
1723 instead and returns the CPU user time.
1724
1724
1725 -p<P>: use a precision of <P> digits to display the timing result.
1725 -p<P>: use a precision of <P> digits to display the timing result.
1726 Default: 3
1726 Default: 3
1727
1727
1728
1728
1729 Examples:\\
1729 Examples:\\
1730 In [1]: %timeit pass
1730 In [1]: %timeit pass
1731 10000000 loops, best of 3: 53.3 ns per loop
1731 10000000 loops, best of 3: 53.3 ns per loop
1732
1732
1733 In [2]: u = None
1733 In [2]: u = None
1734
1734
1735 In [3]: %timeit u is None
1735 In [3]: %timeit u is None
1736 10000000 loops, best of 3: 184 ns per loop
1736 10000000 loops, best of 3: 184 ns per loop
1737
1737
1738 In [4]: %timeit -r 4 u == None
1738 In [4]: %timeit -r 4 u == None
1739 1000000 loops, best of 4: 242 ns per loop
1739 1000000 loops, best of 4: 242 ns per loop
1740
1740
1741 In [5]: import time
1741 In [5]: import time
1742
1742
1743 In [6]: %timeit -n1 time.sleep(2)
1743 In [6]: %timeit -n1 time.sleep(2)
1744 1 loops, best of 3: 2 s per loop
1744 1 loops, best of 3: 2 s per loop
1745
1745
1746
1746
1747 The times reported by %timeit will be slightly higher than those
1747 The times reported by %timeit will be slightly higher than those
1748 reported by the timeit.py script when variables are accessed. This is
1748 reported by the timeit.py script when variables are accessed. This is
1749 due to the fact that %timeit executes the statement in the namespace
1749 due to the fact that %timeit executes the statement in the namespace
1750 of the shell, compared with timeit.py, which uses a single setup
1750 of the shell, compared with timeit.py, which uses a single setup
1751 statement to import functions or create variables. Generally, the bias
1752 does not matter as long as results from timeit.py are not mixed with
1752 does not matter as long as results from timeit.py are not mixed with
1753 those from %timeit."""
1753 those from %timeit."""
1754
1754
1755 import timeit
1755 import timeit
1756 import math
1756 import math
1757
1757
1758 units = ["s", "ms", "\xc2\xb5s", "ns"]
1758 units = ["s", "ms", "\xc2\xb5s", "ns"]
1759 scaling = [1, 1e3, 1e6, 1e9]
1759 scaling = [1, 1e3, 1e6, 1e9]
1760
1760
1761 opts, stmt = self.parse_options(parameter_s,'n:r:tcp:',
1761 opts, stmt = self.parse_options(parameter_s,'n:r:tcp:',
1762 posix=False)
1762 posix=False)
1763 if stmt == "":
1763 if stmt == "":
1764 return
1764 return
1765 timefunc = timeit.default_timer
1765 timefunc = timeit.default_timer
1766 number = int(getattr(opts, "n", 0))
1766 number = int(getattr(opts, "n", 0))
1767 repeat = int(getattr(opts, "r", timeit.default_repeat))
1767 repeat = int(getattr(opts, "r", timeit.default_repeat))
1768 precision = int(getattr(opts, "p", 3))
1768 precision = int(getattr(opts, "p", 3))
1769 if hasattr(opts, "t"):
1769 if hasattr(opts, "t"):
1770 timefunc = time.time
1770 timefunc = time.time
1771 if hasattr(opts, "c"):
1771 if hasattr(opts, "c"):
1772 timefunc = clock
1772 timefunc = clock
1773
1773
1774 timer = timeit.Timer(timer=timefunc)
1774 timer = timeit.Timer(timer=timefunc)
1775 # this code has tight coupling to the inner workings of timeit.Timer,
1775 # this code has tight coupling to the inner workings of timeit.Timer,
1776 # but is there a better way to achieve that the code stmt has access
1776 # but is there a better way to achieve that the code stmt has access
1777 # to the shell namespace?
1777 # to the shell namespace?
1778
1778
1779 src = timeit.template % {'stmt': timeit.reindent(stmt, 8),
1779 src = timeit.template % {'stmt': timeit.reindent(stmt, 8),
1780 'setup': "pass"}
1780 'setup': "pass"}
1781 # Track compilation time so it can be reported if too long
1781 # Track compilation time so it can be reported if too long
1782 # Minimum time above which compilation time will be reported
1782 # Minimum time above which compilation time will be reported
1783 tc_min = 0.1
1783 tc_min = 0.1
1784
1784
1785 t0 = clock()
1785 t0 = clock()
1786 code = compile(src, "<magic-timeit>", "exec")
1786 code = compile(src, "<magic-timeit>", "exec")
1787 tc = clock()-t0
1787 tc = clock()-t0
1788
1788
1789 ns = {}
1789 ns = {}
1790 exec code in self.shell.user_ns, ns
1790 exec code in self.shell.user_ns, ns
1791 timer.inner = ns["inner"]
1791 timer.inner = ns["inner"]
1792
1792
1793 if number == 0:
1793 if number == 0:
1794 # determine number so that 0.2 <= total time < 2.0
1794 # determine number so that 0.2 <= total time < 2.0
1795 number = 1
1795 number = 1
1796 for i in range(1, 10):
1796 for i in range(1, 10):
1797 number *= 10
1797 number *= 10
1798 if timer.timeit(number) >= 0.2:
1798 if timer.timeit(number) >= 0.2:
1799 break
1799 break
1800
1800
1801 best = min(timer.repeat(repeat, number)) / number
1801 best = min(timer.repeat(repeat, number)) / number
1802
1802
1803 if best > 0.0:
1803 if best > 0.0:
1804 order = min(-int(math.floor(math.log10(best)) // 3), 3)
1804 order = min(-int(math.floor(math.log10(best)) // 3), 3)
1805 else:
1805 else:
1806 order = 3
1806 order = 3
1807 print "%d loops, best of %d: %.*g %s per loop" % (number, repeat,
1807 print "%d loops, best of %d: %.*g %s per loop" % (number, repeat,
1808 precision,
1808 precision,
1809 best * scaling[order],
1809 best * scaling[order],
1810 units[order])
1810 units[order])
1811 if tc > tc_min:
1811 if tc > tc_min:
1812 print "Compiler time: %.2f s" % tc
1812 print "Compiler time: %.2f s" % tc
1813
1813
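# Illustrative sketch (not from the original file): two techniques used by
# magic_timeit above, shown standalone on top of the standard timeit module.
# (1) the loop count grows by powers of ten until one measurement takes at
# least 0.2 s; (2) the display unit is picked from units/scaling tables based
# on the order of magnitude of the best per-loop time.

import math
import timeit

def best_per_loop(stmt, repeat=3):
    """Return (best_seconds_per_loop, number_of_loops) for a statement string."""
    timer = timeit.Timer(stmt)
    number = 1
    for _ in range(1, 10):
        number *= 10
        if timer.timeit(number) >= 0.2:       # stop once a run is long enough
            break
    return min(timer.repeat(repeat, number)) / number, number

def format_timing(best, number, repeat, precision=3):
    """Format a per-loop time the way the report above does."""
    units = ["s", "ms", "us", "ns"]
    scaling = [1, 1e3, 1e6, 1e9]
    # order of magnitude index: 0 -> s, 1 -> ms, 2 -> us, 3 -> ns
    order = min(-int(math.floor(math.log10(best)) // 3), 3) if best > 0 else 3
    return "%d loops, best of %d: %.*g %s per loop" % (
        number, repeat, precision, best * scaling[order], units[order])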
1814 def magic_time(self,parameter_s = ''):
1814 def magic_time(self,parameter_s = ''):
1815 """Time execution of a Python statement or expression.
1815 """Time execution of a Python statement or expression.
1816
1816
1817 The CPU and wall clock times are printed, and the value of the
1817 The CPU and wall clock times are printed, and the value of the
1818 expression (if any) is returned. Note that under Win32, system time
1818 expression (if any) is returned. Note that under Win32, system time
1819 is always reported as 0, since it can not be measured.
1819 is always reported as 0, since it can not be measured.
1820
1820
1821 This function provides very basic timing functionality. In Python
1821 This function provides very basic timing functionality. In Python
1822 2.3, the timeit module offers more control and sophistication, so this
1822 2.3, the timeit module offers more control and sophistication, so this
1823 could be rewritten to use it (patches welcome).
1823 could be rewritten to use it (patches welcome).
1824
1824
1825 Some examples:
1825 Some examples:
1826
1826
1827 In [1]: time 2**128
1827 In [1]: time 2**128
1828 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1828 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1829 Wall time: 0.00
1829 Wall time: 0.00
1830 Out[1]: 340282366920938463463374607431768211456L
1830 Out[1]: 340282366920938463463374607431768211456L
1831
1831
1832 In [2]: n = 1000000
1832 In [2]: n = 1000000
1833
1833
1834 In [3]: time sum(range(n))
1834 In [3]: time sum(range(n))
1835 CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
1835 CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
1836 Wall time: 1.37
1836 Wall time: 1.37
1837 Out[3]: 499999500000L
1837 Out[3]: 499999500000L
1838
1838
1839 In [4]: time print 'hello world'
1839 In [4]: time print 'hello world'
1840 hello world
1840 hello world
1841 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1841 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1842 Wall time: 0.00
1842 Wall time: 0.00
1843
1843
1844 Note that the time needed by Python to compile the given expression
1844 Note that the time needed by Python to compile the given expression
1845 will be reported if it is more than 0.1s. In this example, the
1845 will be reported if it is more than 0.1s. In this example, the
1846 actual exponentiation is done by Python at compilation time, so while
1846 actual exponentiation is done by Python at compilation time, so while
1847 the expression can take a noticeable amount of time to compute, that
1847 the expression can take a noticeable amount of time to compute, that
1848 time is purely due to the compilation:
1848 time is purely due to the compilation:
1849
1849
1850 In [5]: time 3**9999;
1850 In [5]: time 3**9999;
1851 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1851 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1852 Wall time: 0.00 s
1852 Wall time: 0.00 s
1853
1853
1854 In [6]: time 3**999999;
1854 In [6]: time 3**999999;
1855 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1855 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1856 Wall time: 0.00 s
1856 Wall time: 0.00 s
1857 Compiler : 0.78 s
1857 Compiler : 0.78 s
1858 """
1858 """
1859
1859
1860 # fail immediately if the given expression can't be compiled
1860 # fail immediately if the given expression can't be compiled
1861
1861
1862 expr = self.shell.prefilter(parameter_s,False)
1862 expr = self.shell.prefilter(parameter_s,False)
1863
1863
1864 # Minimum time above which compilation time will be reported
1864 # Minimum time above which compilation time will be reported
1865 tc_min = 0.1
1865 tc_min = 0.1
1866
1866
1867 try:
1867 try:
1868 mode = 'eval'
1868 mode = 'eval'
1869 t0 = clock()
1869 t0 = clock()
1870 code = compile(expr,'<timed eval>',mode)
1870 code = compile(expr,'<timed eval>',mode)
1871 tc = clock()-t0
1871 tc = clock()-t0
1872 except SyntaxError:
1872 except SyntaxError:
1873 mode = 'exec'
1873 mode = 'exec'
1874 t0 = clock()
1874 t0 = clock()
1875 code = compile(expr,'<timed exec>',mode)
1875 code = compile(expr,'<timed exec>',mode)
1876 tc = clock()-t0
1876 tc = clock()-t0
1877 # skew measurement as little as possible
1877 # skew measurement as little as possible
1878 glob = self.shell.user_ns
1878 glob = self.shell.user_ns
1879 clk = clock2
1879 clk = clock2
1880 wtime = time.time
1880 wtime = time.time
1881 # time execution
1881 # time execution
1882 wall_st = wtime()
1882 wall_st = wtime()
1883 if mode=='eval':
1883 if mode=='eval':
1884 st = clk()
1884 st = clk()
1885 out = eval(code,glob)
1885 out = eval(code,glob)
1886 end = clk()
1886 end = clk()
1887 else:
1887 else:
1888 st = clk()
1888 st = clk()
1889 exec code in glob
1889 exec code in glob
1890 end = clk()
1890 end = clk()
1891 out = None
1891 out = None
1892 wall_end = wtime()
1892 wall_end = wtime()
1893 # Compute actual times and report
1893 # Compute actual times and report
1894 wall_time = wall_end-wall_st
1894 wall_time = wall_end-wall_st
1895 cpu_user = end[0]-st[0]
1895 cpu_user = end[0]-st[0]
1896 cpu_sys = end[1]-st[1]
1896 cpu_sys = end[1]-st[1]
1897 cpu_tot = cpu_user+cpu_sys
1897 cpu_tot = cpu_user+cpu_sys
1898 print "CPU times: user %.2f s, sys: %.2f s, total: %.2f s" % \
1898 print "CPU times: user %.2f s, sys: %.2f s, total: %.2f s" % \
1899 (cpu_user,cpu_sys,cpu_tot)
1899 (cpu_user,cpu_sys,cpu_tot)
1900 print "Wall time: %.2f s" % wall_time
1900 print "Wall time: %.2f s" % wall_time
1901 if tc > tc_min:
1901 if tc > tc_min:
1902 print "Compiler : %.2f s" % tc
1902 print "Compiler : %.2f s" % tc
1903 return out
1903 return out
1904
1904
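# Illustrative sketch (not from the original file): magic_time above first
# tries to compile the input in 'eval' mode (so the value of an expression can
# be returned) and falls back to 'exec' mode on a SyntaxError. A minimal
# standalone version of that compile-then-time pattern, using wall-clock time
# only, could be:

import time

def time_source(source, namespace):
    """Compile `source` as an expression if possible, else as statements,
    run it once in `namespace`, and return (result, wall_seconds)."""
    try:
        code = compile(source, "<timed eval>", "eval")
        mode = "eval"
    except SyntaxError:
        code = compile(source, "<timed exec>", "exec")
        mode = "exec"
    start = time.time()
    if mode == "eval":
        result = eval(code, namespace)
    else:
        exec(code, namespace)
        result = None
    return result, time.time() - start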
1905 def magic_macro(self,parameter_s = ''):
1905 def magic_macro(self,parameter_s = ''):
1906 """Define a set of input lines as a macro for future re-execution.
1906 """Define a set of input lines as a macro for future re-execution.
1907
1907
1908 Usage:\\
1908 Usage:\\
1909 %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
1909 %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
1910
1910
1911 Options:
1911 Options:
1912
1912
1913 -r: use 'raw' input. By default, the 'processed' history is used,
1913 -r: use 'raw' input. By default, the 'processed' history is used,
1914 so that magics are loaded in their transformed version to valid
1914 so that magics are loaded in their transformed version to valid
1915 Python. If this option is given, the raw input as typed at the
1916 command line is used instead.
1916 command line is used instead.
1917
1917
1918 This will define a global variable called `name` which is a string
1918 This will define a global variable called `name` which is a string
1919 made of joining the slices and lines you specify (n1,n2,... numbers
1919 made of joining the slices and lines you specify (n1,n2,... numbers
1920 above) from your input history into a single string. This variable
1920 above) from your input history into a single string. This variable
1921 acts like an automatic function which re-executes those lines as if
1921 acts like an automatic function which re-executes those lines as if
1922 you had typed them. You just type 'name' at the prompt and the code
1922 you had typed them. You just type 'name' at the prompt and the code
1923 executes.
1923 executes.
1924
1924
1925 The notation for indicating number ranges is: n1-n2 means 'use line
1925 The notation for indicating number ranges is: n1-n2 means 'use line
1926 numbers n1,...n2' (the endpoint is included). That is, '5-7' means
1926 numbers n1,...n2' (the endpoint is included). That is, '5-7' means
1927 using the lines numbered 5,6 and 7.
1927 using the lines numbered 5,6 and 7.
1928
1928
1929 Note: as a 'hidden' feature, you can also use traditional python slice
1929 Note: as a 'hidden' feature, you can also use traditional python slice
1930 notation, where N:M means numbers N through M-1.
1930 notation, where N:M means numbers N through M-1.
1931
1931
1932 For example, if your history contains (%hist prints it):
1932 For example, if your history contains (%hist prints it):
1933
1933
1934 44: x=1\\
1934 44: x=1\\
1935 45: y=3\\
1935 45: y=3\\
1936 46: z=x+y\\
1936 46: z=x+y\\
1937 47: print x\\
1937 47: print x\\
1938 48: a=5\\
1938 48: a=5\\
1939 49: print 'x',x,'y',y\\
1939 49: print 'x',x,'y',y\\
1940
1940
1941 you can create a macro with lines 44 through 47 (included) and line 49
1941 you can create a macro with lines 44 through 47 (included) and line 49
1942 called my_macro with:
1942 called my_macro with:
1943
1943
1944 In [51]: %macro my_macro 44-47 49
1944 In [51]: %macro my_macro 44-47 49
1945
1945
1946 Now, typing `my_macro` (without quotes) will re-execute all this code
1946 Now, typing `my_macro` (without quotes) will re-execute all this code
1947 in one pass.
1947 in one pass.
1948
1948
1949 You don't need to give the line-numbers in order, and any given line
1949 You don't need to give the line-numbers in order, and any given line
1950 number can appear multiple times. You can assemble macros with any
1950 number can appear multiple times. You can assemble macros with any
1951 lines from your input history in any order.
1951 lines from your input history in any order.
1952
1952
1953 The macro is a simple object which holds its value in an attribute,
1953 The macro is a simple object which holds its value in an attribute,
1954 but IPython's display system checks for macros and executes them as
1954 but IPython's display system checks for macros and executes them as
1955 code instead of printing them when you type their name.
1955 code instead of printing them when you type their name.
1956
1956
1957 You can view a macro's contents by explicitly printing it with:
1957 You can view a macro's contents by explicitly printing it with:
1958
1958
1959 'print macro_name'.
1959 'print macro_name'.
1960
1960
1961 For one-off cases which DON'T contain magic function calls in them you
1961 For one-off cases which DON'T contain magic function calls in them you
1962 can obtain similar results by explicitly executing slices from your
1962 can obtain similar results by explicitly executing slices from your
1963 input history with:
1963 input history with:
1964
1964
1965 In [60]: exec In[44:48]+In[49]"""
1965 In [60]: exec In[44:48]+In[49]"""
1966
1966
1967 opts,args = self.parse_options(parameter_s,'r',mode='list')
1967 opts,args = self.parse_options(parameter_s,'r',mode='list')
1968 if not args:
1968 if not args:
1969 macs = [k for k,v in self.shell.user_ns.items() if isinstance(v, Macro)]
1969 macs = [k for k,v in self.shell.user_ns.items() if isinstance(v, Macro)]
1970 macs.sort()
1970 macs.sort()
1971 return macs
1971 return macs
1972 if len(args) == 1:
1972 if len(args) == 1:
1973 raise UsageError(
1973 raise UsageError(
1974 "%macro insufficient args; usage '%macro name n1-n2 n3-4...")
1974 "%macro insufficient args; usage '%macro name n1-n2 n3-4...")
1975 name,ranges = args[0], args[1:]
1975 name,ranges = args[0], args[1:]
1976
1976
1977 #print 'rng',ranges # dbg
1977 #print 'rng',ranges # dbg
1978 lines = self.extract_input_slices(ranges,opts.has_key('r'))
1978 lines = self.extract_input_slices(ranges,opts.has_key('r'))
1979 macro = Macro(lines)
1979 macro = Macro(lines)
1980 self.shell.user_ns.update({name:macro})
1980 self.shell.user_ns.update({name:macro})
1981 print 'Macro `%s` created. To execute, type its name (without quotes).' % name
1981 print 'Macro `%s` created. To execute, type its name (without quotes).' % name
1982 print 'Macro contents:'
1982 print 'Macro contents:'
1983 print macro,
1983 print macro,
1984
1984
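# Illustrative sketch (not from the original file): the %macro/%save
# docstrings above describe a range notation where '44-47 49' means history
# lines 44, 45, 46, 47 and 49 (endpoints included), while 'N:M' uses Python
# slice semantics (M excluded). A hypothetical parser for that notation,
# independent of IPython's extract_input_slices, might look like:

def parse_ranges(tokens):
    """Expand tokens such as ['44-47', '49', '5:8'] into a list of ints."""
    numbers = []
    for tok in tokens:
        if '-' in tok:                        # inclusive range: n1-n2
            lo, hi = (int(part) for part in tok.split('-', 1))
            numbers.extend(range(lo, hi + 1))
        elif ':' in tok:                      # slice-style range: N:M (M excluded)
            lo, hi = (int(part) for part in tok.split(':', 1))
            numbers.extend(range(lo, hi))
        else:
            numbers.append(int(tok))
    return numbers

# Example: parse_ranges('44-47 49'.split()) -> [44, 45, 46, 47, 49]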
1985 def magic_save(self,parameter_s = ''):
1985 def magic_save(self,parameter_s = ''):
1986 """Save a set of lines to a given filename.
1986 """Save a set of lines to a given filename.
1987
1987
1988 Usage:\\
1988 Usage:\\
1989 %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
1989 %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
1990
1990
1991 Options:
1991 Options:
1992
1992
1993 -r: use 'raw' input. By default, the 'processed' history is used,
1993 -r: use 'raw' input. By default, the 'processed' history is used,
1994 so that magics are loaded in their transformed version to valid
1994 so that magics are loaded in their transformed version to valid
1995 Python. If this option is given, the raw input as typed at the
1996 command line is used instead.
1996 command line is used instead.
1997
1997
1998 This function uses the same syntax as %macro for line extraction, but
1998 This function uses the same syntax as %macro for line extraction, but
1999 instead of creating a macro it saves the resulting string to the
1999 instead of creating a macro it saves the resulting string to the
2000 filename you specify.
2000 filename you specify.
2001
2001
2002 It adds a '.py' extension to the file if you don't do so yourself, and
2002 It adds a '.py' extension to the file if you don't do so yourself, and
2003 it asks for confirmation before overwriting existing files."""
2003 it asks for confirmation before overwriting existing files."""
2004
2004
2005 opts,args = self.parse_options(parameter_s,'r',mode='list')
2005 opts,args = self.parse_options(parameter_s,'r',mode='list')
2006 fname,ranges = args[0], args[1:]
2006 fname,ranges = args[0], args[1:]
2007 if not fname.endswith('.py'):
2007 if not fname.endswith('.py'):
2008 fname += '.py'
2008 fname += '.py'
2009 if os.path.isfile(fname):
2009 if os.path.isfile(fname):
2010 ans = raw_input('File `%s` exists. Overwrite (y/[N])? ' % fname)
2010 ans = raw_input('File `%s` exists. Overwrite (y/[N])? ' % fname)
2011 if ans.lower() not in ['y','yes']:
2011 if ans.lower() not in ['y','yes']:
2012 print 'Operation cancelled.'
2012 print 'Operation cancelled.'
2013 return
2013 return
2014 cmds = ''.join(self.extract_input_slices(ranges,opts.has_key('r')))
2014 cmds = ''.join(self.extract_input_slices(ranges,opts.has_key('r')))
2015 f = file(fname,'w')
2015 f = file(fname,'w')
2016 f.write(cmds)
2016 f.write(cmds)
2017 f.close()
2017 f.close()
2018 print 'The following commands were written to file `%s`:' % fname
2018 print 'The following commands were written to file `%s`:' % fname
2019 print cmds
2019 print cmds
2020
2020
2021 def _edit_macro(self,mname,macro):
2021 def _edit_macro(self,mname,macro):
2022 """open an editor with the macro data in a file"""
2022 """open an editor with the macro data in a file"""
2023 filename = self.shell.mktempfile(macro.value)
2023 filename = self.shell.mktempfile(macro.value)
2024 self.shell.hooks.editor(filename)
2024 self.shell.hooks.editor(filename)
2025
2025
2026 # and make a new macro object, to replace the old one
2026 # and make a new macro object, to replace the old one
2027 mfile = open(filename)
2027 mfile = open(filename)
2028 mvalue = mfile.read()
2028 mvalue = mfile.read()
2029 mfile.close()
2029 mfile.close()
2030 self.shell.user_ns[mname] = Macro(mvalue)
2030 self.shell.user_ns[mname] = Macro(mvalue)
2031
2031
2032 def magic_ed(self,parameter_s=''):
2032 def magic_ed(self,parameter_s=''):
2033 """Alias to %edit."""
2033 """Alias to %edit."""
2034 return self.magic_edit(parameter_s)
2034 return self.magic_edit(parameter_s)
2035
2035
2036 def magic_edit(self,parameter_s='',last_call=['','']):
2036 def magic_edit(self,parameter_s='',last_call=['','']):
2037 """Bring up an editor and execute the resulting code.
2037 """Bring up an editor and execute the resulting code.
2038
2038
2039 Usage:
2039 Usage:
2040 %edit [options] [args]
2040 %edit [options] [args]
2041
2041
2042 %edit runs IPython's editor hook. The default version of this hook is
2042 %edit runs IPython's editor hook. The default version of this hook is
2043 set to call the __IPYTHON__.rc.editor command. This is read from your
2043 set to call the __IPYTHON__.rc.editor command. This is read from your
2044 environment variable $EDITOR. If this isn't found, it will default to
2044 environment variable $EDITOR. If this isn't found, it will default to
2045 vi under Linux/Unix and to notepad under Windows. See the end of this
2045 vi under Linux/Unix and to notepad under Windows. See the end of this
2046 docstring for how to change the editor hook.
2046 docstring for how to change the editor hook.
2047
2047
2048 You can also set the value of this editor via the command line option
2048 You can also set the value of this editor via the command line option
2049 '-editor' or in your ipythonrc file. This is useful if you wish to use
2050 an editor specifically for IPython that differs from your typical default
2051 (and for Windows users who typically don't set environment variables).
2051 (and for Windows users who typically don't set environment variables).
2052
2052
2053 This command allows you to conveniently edit multi-line code right in
2053 This command allows you to conveniently edit multi-line code right in
2054 your IPython session.
2054 your IPython session.
2055
2055
2056 If called without arguments, %edit opens up an empty editor with a
2056 If called without arguments, %edit opens up an empty editor with a
2057 temporary file and will execute the contents of this file when you
2057 temporary file and will execute the contents of this file when you
2058 close it (don't forget to save it!).
2058 close it (don't forget to save it!).
2059
2059
2060
2060
2061 Options:
2061 Options:
2062
2062
2063 -n <number>: open the editor at a specified line number. By default,
2063 -n <number>: open the editor at a specified line number. By default,
2064 the IPython editor hook uses the unix syntax 'editor +N filename', but
2064 the IPython editor hook uses the unix syntax 'editor +N filename', but
2065 you can configure this by providing your own modified hook if your
2065 you can configure this by providing your own modified hook if your
2066 favorite editor supports line-number specifications with a different
2066 favorite editor supports line-number specifications with a different
2067 syntax.
2067 syntax.
2068
2068
2069 -p: this will call the editor with the same data as the previous time
2069 -p: this will call the editor with the same data as the previous time
2070 it was used, regardless of how long ago (in your current session) it
2070 it was used, regardless of how long ago (in your current session) it
2071 was.
2071 was.
2072
2072
2073 -r: use 'raw' input. This option only applies to input taken from the
2073 -r: use 'raw' input. This option only applies to input taken from the
2074 user's history. By default, the 'processed' history is used, so that
2074 user's history. By default, the 'processed' history is used, so that
2075 magics are loaded in their transformed version to valid Python. If
2075 magics are loaded in their transformed version to valid Python. If
2076 this option is given, the raw input as typed at the command line is
2077 used instead. When you exit the editor, it will be executed by
2077 used instead. When you exit the editor, it will be executed by
2078 IPython's own processor.
2078 IPython's own processor.
2079
2079
2080 -x: do not execute the edited code immediately upon exit. This is
2080 -x: do not execute the edited code immediately upon exit. This is
2081 mainly useful if you are editing programs which need to be called with
2081 mainly useful if you are editing programs which need to be called with
2082 command line arguments, which you can then do using %run.
2082 command line arguments, which you can then do using %run.
2083
2083
2084
2084
2085 Arguments:
2085 Arguments:
2086
2086
2087 If arguments are given, the following possibilities exist:
2088
2088
2089 - The arguments are numbers or pairs of colon-separated numbers (like
2089 - The arguments are numbers or pairs of colon-separated numbers (like
2090 1 4:8 9). These are interpreted as lines of previous input to be
2090 1 4:8 9). These are interpreted as lines of previous input to be
2091 loaded into the editor. The syntax is the same as that of the %macro command.
2092
2092
2093 - If the argument doesn't start with a number, it is evaluated as a
2093 - If the argument doesn't start with a number, it is evaluated as a
2094 variable and its contents loaded into the editor. You can thus edit
2094 variable and its contents loaded into the editor. You can thus edit
2095 any string which contains python code (including the result of
2095 any string which contains python code (including the result of
2096 previous edits).
2096 previous edits).
2097
2097
2098 - If the argument is the name of an object (other than a string),
2098 - If the argument is the name of an object (other than a string),
2099 IPython will try to locate the file where it was defined and open the
2099 IPython will try to locate the file where it was defined and open the
2100 editor at the point where it is defined. You can use `%edit function`
2100 editor at the point where it is defined. You can use `%edit function`
2101 to load an editor exactly at the point where 'function' is defined,
2101 to load an editor exactly at the point where 'function' is defined,
2102 edit it and have the file be executed automatically.
2102 edit it and have the file be executed automatically.
2103
2103
2104 If the object is a macro (see %macro for details), this opens up your
2104 If the object is a macro (see %macro for details), this opens up your
2105 specified editor with a temporary file containing the macro's data.
2105 specified editor with a temporary file containing the macro's data.
2106 Upon exit, the macro is reloaded with the contents of the file.
2106 Upon exit, the macro is reloaded with the contents of the file.
2107
2107
2108 Note: opening at an exact line is only supported under Unix, and some
2108 Note: opening at an exact line is only supported under Unix, and some
2109 editors (like kedit and gedit up to Gnome 2.8) do not understand the
2109 editors (like kedit and gedit up to Gnome 2.8) do not understand the
2110 '+NUMBER' parameter necessary for this feature. Good editors like
2110 '+NUMBER' parameter necessary for this feature. Good editors like
2111 (X)Emacs, vi, jed, pico and joe all do.
2111 (X)Emacs, vi, jed, pico and joe all do.
2112
2112
2113 - If the argument is not found as a variable, IPython will look for a
2113 - If the argument is not found as a variable, IPython will look for a
2114 file with that name (adding .py if necessary) and load it into the
2114 file with that name (adding .py if necessary) and load it into the
2115 editor. It will execute its contents with execfile() when you exit,
2115 editor. It will execute its contents with execfile() when you exit,
2116 loading any code in the file into your interactive namespace.
2116 loading any code in the file into your interactive namespace.
2117
2117
2118 After executing your code, %edit will return as output the code you
2118 After executing your code, %edit will return as output the code you
2119 typed in the editor (except when it was an existing file). This way
2119 typed in the editor (except when it was an existing file). This way
2120 you can reload the code in further invocations of %edit as a variable,
2120 you can reload the code in further invocations of %edit as a variable,
2121 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
2121 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
2122 the output.
2122 the output.
2123
2123
2124 Note that %edit is also available through the alias %ed.
2124 Note that %edit is also available through the alias %ed.
2125
2125
2126 This is an example of creating a simple function inside the editor and
2126 This is an example of creating a simple function inside the editor and
2127 then modifying it. First, start up the editor:
2127 then modifying it. First, start up the editor:
2128
2128
2129 In [1]: ed\\
2129 In [1]: ed\\
2130 Editing... done. Executing edited code...\\
2130 Editing... done. Executing edited code...\\
2131 Out[1]: 'def foo():\\n print "foo() was defined in an editing session"\\n'
2131 Out[1]: 'def foo():\\n print "foo() was defined in an editing session"\\n'
2132
2132
2133 We can then call the function foo():
2133 We can then call the function foo():
2134
2134
2135 In [2]: foo()\\
2135 In [2]: foo()\\
2136 foo() was defined in an editing session
2136 foo() was defined in an editing session
2137
2137
2138 Now we edit foo. IPython automatically loads the editor with the
2138 Now we edit foo. IPython automatically loads the editor with the
2139 (temporary) file where foo() was previously defined:
2139 (temporary) file where foo() was previously defined:
2140
2140
2141 In [3]: ed foo\\
2141 In [3]: ed foo\\
2142 Editing... done. Executing edited code...
2142 Editing... done. Executing edited code...
2143
2143
2144 And if we call foo() again we get the modified version:
2144 And if we call foo() again we get the modified version:
2145
2145
2146 In [4]: foo()\\
2146 In [4]: foo()\\
2147 foo() has now been changed!
2147 foo() has now been changed!
2148
2148
2149 Here is an example of how to edit a code snippet several
2150 successive times. First we call the editor:
2151
2151
2152 In [8]: ed\\
2152 In [8]: ed\\
2153 Editing... done. Executing edited code...\\
2153 Editing... done. Executing edited code...\\
2154 hello\\
2154 hello\\
2155 Out[8]: "print 'hello'\\n"
2155 Out[8]: "print 'hello'\\n"
2156
2156
2157 Now we call it again with the previous output (stored in _):
2157 Now we call it again with the previous output (stored in _):
2158
2158
2159 In [9]: ed _\\
2159 In [9]: ed _\\
2160 Editing... done. Executing edited code...\\
2160 Editing... done. Executing edited code...\\
2161 hello world\\
2161 hello world\\
2162 Out[9]: "print 'hello world'\\n"
2162 Out[9]: "print 'hello world'\\n"
2163
2163
2164 Now we call it with the output #8 (stored in _8, also as Out[8]):
2164 Now we call it with the output #8 (stored in _8, also as Out[8]):
2165
2165
2166 In [10]: ed _8\\
2166 In [10]: ed _8\\
2167 Editing... done. Executing edited code...\\
2167 Editing... done. Executing edited code...\\
2168 hello again\\
2168 hello again\\
2169 Out[10]: "print 'hello again'\\n"
2169 Out[10]: "print 'hello again'\\n"
2170
2170
2171
2171
2172 Changing the default editor hook:
2172 Changing the default editor hook:
2173
2173
2174 If you wish to write your own editor hook, you can put it in a
2174 If you wish to write your own editor hook, you can put it in a
2175 configuration file which you load at startup time. The default hook
2175 configuration file which you load at startup time. The default hook
2176 is defined in the IPython.hooks module, and you can use that as a
2176 is defined in the IPython.hooks module, and you can use that as a
2177 starting example for further modifications. That file also has
2177 starting example for further modifications. That file also has
2178 general instructions on how to set a new hook for use once you've
2178 general instructions on how to set a new hook for use once you've
2179 defined it."""
2179 defined it."""
2180
2180
2181 # FIXME: This function has become a convoluted mess. It needs a
2181 # FIXME: This function has become a convoluted mess. It needs a
2182 # ground-up rewrite with clean, simple logic.
2182 # ground-up rewrite with clean, simple logic.
2183
2183
2184 def make_filename(arg):
2184 def make_filename(arg):
2185 "Make a filename from the given args"
2185 "Make a filename from the given args"
2186 try:
2186 try:
2187 filename = get_py_filename(arg)
2187 filename = get_py_filename(arg)
2188 except IOError:
2188 except IOError:
2189 if arg.endswith('.py'):
2190 filename = arg
2190 filename = arg
2191 else:
2191 else:
2192 filename = None
2192 filename = None
2193 return filename
2193 return filename
2194
2194
2195 # custom exceptions
2195 # custom exceptions
2196 class DataIsObject(Exception): pass
2196 class DataIsObject(Exception): pass
2197
2197
2198 opts,args = self.parse_options(parameter_s,'prxn:')
2198 opts,args = self.parse_options(parameter_s,'prxn:')
2199 # Set a few locals from the options for convenience:
2199 # Set a few locals from the options for convenience:
2200 opts_p = opts.has_key('p')
2200 opts_p = opts.has_key('p')
2201 opts_r = opts.has_key('r')
2201 opts_r = opts.has_key('r')
2202
2202
2203 # Default line number value
2203 # Default line number value
2204 lineno = opts.get('n',None)
2204 lineno = opts.get('n',None)
2205
2205
2206 if opts_p:
2206 if opts_p:
2207 args = '_%s' % last_call[0]
2207 args = '_%s' % last_call[0]
2208 if not self.shell.user_ns.has_key(args):
2208 if not self.shell.user_ns.has_key(args):
2209 args = last_call[1]
2209 args = last_call[1]
2210
2210
2211 # use last_call to remember the state of the previous call, but don't
2211 # use last_call to remember the state of the previous call, but don't
2212 # let it be clobbered by successive '-p' calls.
2212 # let it be clobbered by successive '-p' calls.
2213 try:
2213 try:
2214 last_call[0] = self.shell.outputcache.prompt_count
2214 last_call[0] = self.shell.outputcache.prompt_count
2215 if not opts_p:
2215 if not opts_p:
2216 last_call[1] = parameter_s
2216 last_call[1] = parameter_s
2217 except:
2217 except:
2218 pass
2218 pass
2219
2219
2220 # by default this is done with temp files, except when the given
2220 # by default this is done with temp files, except when the given
2221 # arg is a filename
2221 # arg is a filename
2222 use_temp = 1
2222 use_temp = 1
2223
2223
2224 if re.match(r'\d',args):
2224 if re.match(r'\d',args):
2225 # Mode where user specifies ranges of lines, like in %macro.
2225 # Mode where user specifies ranges of lines, like in %macro.
2226 # This means that you can't edit files whose names begin with
2226 # This means that you can't edit files whose names begin with
2227 # numbers this way. Tough.
2227 # numbers this way. Tough.
2228 ranges = args.split()
2228 ranges = args.split()
2229 data = ''.join(self.extract_input_slices(ranges,opts_r))
2229 data = ''.join(self.extract_input_slices(ranges,opts_r))
2230 elif args.endswith('.py'):
2230 elif args.endswith('.py'):
2231 filename = make_filename(args)
2231 filename = make_filename(args)
2232 data = ''
2232 data = ''
2233 use_temp = 0
2233 use_temp = 0
2234 elif args:
2234 elif args:
2235 try:
2235 try:
2236 # Load the parameter given as a variable. If not a string,
2236 # Load the parameter given as a variable. If not a string,
2237 # process it as an object instead (below)
2237 # process it as an object instead (below)
2238
2238
2239 #print '*** args',args,'type',type(args) # dbg
2239 #print '*** args',args,'type',type(args) # dbg
2240 data = eval(args,self.shell.user_ns)
2240 data = eval(args,self.shell.user_ns)
2241 if not type(data) in StringTypes:
2241 if not type(data) in StringTypes:
2242 raise DataIsObject
2242 raise DataIsObject
2243
2243
2244 except (NameError,SyntaxError):
2244 except (NameError,SyntaxError):
2245 # given argument is not a variable, try as a filename
2245 # given argument is not a variable, try as a filename
2246 filename = make_filename(args)
2246 filename = make_filename(args)
2247 if filename is None:
2247 if filename is None:
2248 warn("Argument given (%s) can't be found as a variable "
2248 warn("Argument given (%s) can't be found as a variable "
2249 "or as a filename." % args)
2249 "or as a filename." % args)
2250 return
2250 return
2251
2251
2252 data = ''
2252 data = ''
2253 use_temp = 0
2253 use_temp = 0
2254 except DataIsObject:
2254 except DataIsObject:
2255
2255
2256 # macros have a special edit function
2256 # macros have a special edit function
2257 if isinstance(data,Macro):
2257 if isinstance(data,Macro):
2258 self._edit_macro(args,data)
2258 self._edit_macro(args,data)
2259 return
2259 return
2260
2260
2261 # For objects, try to edit the file where they are defined
2261 # For objects, try to edit the file where they are defined
2262 try:
2262 try:
2263 filename = inspect.getabsfile(data)
2263 filename = inspect.getabsfile(data)
2264 if 'fakemodule' in filename.lower() and inspect.isclass(data):
2264 if 'fakemodule' in filename.lower() and inspect.isclass(data):
2265 # class created by %edit? Try to find source
2265 # class created by %edit? Try to find source
2266 # by looking for method definitions instead, the
2266 # by looking for method definitions instead, the
2267 # __module__ in those classes is FakeModule.
2267 # __module__ in those classes is FakeModule.
2268 attrs = [getattr(data, aname) for aname in dir(data)]
2268 attrs = [getattr(data, aname) for aname in dir(data)]
2269 for attr in attrs:
2269 for attr in attrs:
2270 if not inspect.ismethod(attr):
2270 if not inspect.ismethod(attr):
2271 continue
2271 continue
2272 filename = inspect.getabsfile(attr)
2272 filename = inspect.getabsfile(attr)
2273 if filename and 'fakemodule' not in filename.lower():
2273 if filename and 'fakemodule' not in filename.lower():
2274 # change the attribute to be the edit target instead
2274 # change the attribute to be the edit target instead
2275 data = attr
2275 data = attr
2276 break
2276 break
2277
2277
2278 datafile = 1
2278 datafile = 1
2279 except TypeError:
2279 except TypeError:
2280 filename = make_filename(args)
2280 filename = make_filename(args)
2281 datafile = 1
2281 datafile = 1
2282 warn('Could not find file where `%s` is defined.\n'
2282 warn('Could not find file where `%s` is defined.\n'
2283 'Opening a file named `%s`' % (args,filename))
2283 'Opening a file named `%s`' % (args,filename))
2284 # Now, make sure we can actually read the source (if it was in
2284 # Now, make sure we can actually read the source (if it was in
2285 # a temp file it's gone by now).
2285 # a temp file it's gone by now).
2286 if datafile:
2286 if datafile:
2287 try:
2287 try:
2288 if lineno is None:
2288 if lineno is None:
2289 lineno = inspect.getsourcelines(data)[1]
2289 lineno = inspect.getsourcelines(data)[1]
2290 except IOError:
2290 except IOError:
2291 filename = make_filename(args)
2291 filename = make_filename(args)
2292 if filename is None:
2292 if filename is None:
2293 warn('The file `%s` where `%s` was defined cannot '
2293 warn('The file `%s` where `%s` was defined cannot '
2294 'be read.' % (filename,data))
2294 'be read.' % (filename,data))
2295 return
2295 return
2296 use_temp = 0
2296 use_temp = 0
2297 else:
2297 else:
2298 data = ''
2298 data = ''
2299
2299
2300 if use_temp:
2300 if use_temp:
2301 filename = self.shell.mktempfile(data)
2301 filename = self.shell.mktempfile(data)
2302 print 'IPython will make a temporary file named:',filename
2302 print 'IPython will make a temporary file named:',filename
2303
2303
2304 # do actual editing here
2304 # do actual editing here
2305 print 'Editing...',
2305 print 'Editing...',
2306 sys.stdout.flush()
2306 sys.stdout.flush()
2307 self.shell.hooks.editor(filename,lineno)
2307 self.shell.hooks.editor(filename,lineno)
2308 if opts.has_key('x'): # -x prevents actual execution
2308 if opts.has_key('x'): # -x prevents actual execution
2309 print
2309 print
2310 else:
2310 else:
2311 print 'done. Executing edited code...'
2311 print 'done. Executing edited code...'
2312 if opts_r:
2312 if opts_r:
2313 self.shell.runlines(file_read(filename))
2313 self.shell.runlines(file_read(filename))
2314 else:
2314 else:
2315 self.shell.safe_execfile(filename,self.shell.user_ns,
2315 self.shell.safe_execfile(filename,self.shell.user_ns,
2316 self.shell.user_ns)
2316 self.shell.user_ns)
2317 if use_temp:
2317 if use_temp:
2318 try:
2318 try:
2319 return open(filename).read()
2319 return open(filename).read()
2320 except IOError,msg:
2320 except IOError,msg:
2321 if msg.filename == filename:
2321 if msg.filename == filename:
2322 warn('File not found. Did you forget to save?')
2322 warn('File not found. Did you forget to save?')
2323 return
2323 return
2324 else:
2324 else:
2325 self.shell.showtraceback()
2325 self.shell.showtraceback()
2326
2326
2327 def magic_xmode(self,parameter_s = ''):
2327 def magic_xmode(self,parameter_s = ''):
2328 """Switch modes for the exception handlers.
2328 """Switch modes for the exception handlers.
2329
2329
2330 Valid modes: Plain, Context and Verbose.
2330 Valid modes: Plain, Context and Verbose.
2331
2331
2332 If called without arguments, acts as a toggle."""
2332 If called without arguments, acts as a toggle."""
2333
2333
2334 def xmode_switch_err(name):
2334 def xmode_switch_err(name):
2335 warn('Error changing %s exception modes.\n%s' %
2335 warn('Error changing %s exception modes.\n%s' %
2336 (name,sys.exc_info()[1]))
2336 (name,sys.exc_info()[1]))
2337
2337
2338 shell = self.shell
2338 shell = self.shell
2339 new_mode = parameter_s.strip().capitalize()
2339 new_mode = parameter_s.strip().capitalize()
2340 try:
2340 try:
2341 shell.InteractiveTB.set_mode(mode=new_mode)
2341 shell.InteractiveTB.set_mode(mode=new_mode)
2342 print 'Exception reporting mode:',shell.InteractiveTB.mode
2342 print 'Exception reporting mode:',shell.InteractiveTB.mode
2343 except:
2343 except:
2344 xmode_switch_err('user')
2344 xmode_switch_err('user')
2345
2345
2346 # threaded shells use a special handler in sys.excepthook
2346 # threaded shells use a special handler in sys.excepthook
2347 if shell.isthreaded:
2347 if shell.isthreaded:
2348 try:
2348 try:
2349 shell.sys_excepthook.set_mode(mode=new_mode)
2349 shell.sys_excepthook.set_mode(mode=new_mode)
2350 except:
2350 except:
2351 xmode_switch_err('threaded')
2351 xmode_switch_err('threaded')
2352
2352
2353 def magic_colors(self,parameter_s = ''):
2353 def magic_colors(self,parameter_s = ''):
2354 """Switch color scheme for prompts, info system and exception handlers.
2354 """Switch color scheme for prompts, info system and exception handlers.
2355
2355
2356 Currently implemented schemes: NoColor, Linux, LightBG.
2356 Currently implemented schemes: NoColor, Linux, LightBG.
2357
2357
2358 Color scheme names are not case-sensitive."""
2358 Color scheme names are not case-sensitive."""
2359
2359
2360 def color_switch_err(name):
2360 def color_switch_err(name):
2361 warn('Error changing %s color schemes.\n%s' %
2361 warn('Error changing %s color schemes.\n%s' %
2362 (name,sys.exc_info()[1]))
2362 (name,sys.exc_info()[1]))
2363
2363
2364
2364
2365 new_scheme = parameter_s.strip()
2365 new_scheme = parameter_s.strip()
2366 if not new_scheme:
2366 if not new_scheme:
2367 raise UsageError(
2367 raise UsageError(
2368 "%colors: you must specify a color scheme. See '%colors?'")
2368 "%colors: you must specify a color scheme. See '%colors?'")
2369 return
2369 return
2370 # local shortcut
2370 # local shortcut
2371 shell = self.shell
2371 shell = self.shell
2372
2372
2373 import IPython.rlineimpl as readline
2373 import IPython.rlineimpl as readline
2374
2374
2375 if not readline.have_readline and sys.platform == "win32":
2375 if not readline.have_readline and sys.platform == "win32":
2376 msg = """\
2376 msg = """\
2377 Proper color support under MS Windows requires the pyreadline library.
2377 Proper color support under MS Windows requires the pyreadline library.
2378 You can find it at:
2378 You can find it at:
2379 http://ipython.scipy.org/moin/PyReadline/Intro
2379 http://ipython.scipy.org/moin/PyReadline/Intro
2380 Gary's readline needs the ctypes module, from:
2380 Gary's readline needs the ctypes module, from:
2381 http://starship.python.net/crew/theller/ctypes
2381 http://starship.python.net/crew/theller/ctypes
2382 (Note that ctypes is already part of Python versions 2.5 and newer).
2382 (Note that ctypes is already part of Python versions 2.5 and newer).
2383
2383
2384 Defaulting color scheme to 'NoColor'"""
2384 Defaulting color scheme to 'NoColor'"""
2385 new_scheme = 'NoColor'
2385 new_scheme = 'NoColor'
2386 warn(msg)
2386 warn(msg)
2387
2387
2388 # readline option is 0
2388 # readline option is 0
2389 if not shell.has_readline:
2389 if not shell.has_readline:
2390 new_scheme = 'NoColor'
2390 new_scheme = 'NoColor'
2391
2391
2392 # Set prompt colors
2392 # Set prompt colors
2393 try:
2393 try:
2394 shell.outputcache.set_colors(new_scheme)
2394 shell.outputcache.set_colors(new_scheme)
2395 except:
2395 except:
2396 color_switch_err('prompt')
2396 color_switch_err('prompt')
2397 else:
2397 else:
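# remember which scheme actually took effect; %color_info later re-applies shell.rc.colors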
2398 shell.rc.colors = \
2398 shell.rc.colors = \
2399 shell.outputcache.color_table.active_scheme_name
2399 shell.outputcache.color_table.active_scheme_name
2400 # Set exception colors
2400 # Set exception colors
2401 try:
2401 try:
2402 shell.InteractiveTB.set_colors(scheme = new_scheme)
2402 shell.InteractiveTB.set_colors(scheme = new_scheme)
2403 shell.SyntaxTB.set_colors(scheme = new_scheme)
2403 shell.SyntaxTB.set_colors(scheme = new_scheme)
2404 except:
2404 except:
2405 color_switch_err('exception')
2405 color_switch_err('exception')
2406
2406
2407 # threaded shells use a verbose traceback in sys.excepthook
2407 # threaded shells use a verbose traceback in sys.excepthook
2408 if shell.isthreaded:
2408 if shell.isthreaded:
2409 try:
2409 try:
2410 shell.sys_excepthook.set_colors(scheme=new_scheme)
2410 shell.sys_excepthook.set_colors(scheme=new_scheme)
2411 except:
2411 except:
2412 color_switch_err('system exception handler')
2412 color_switch_err('system exception handler')
2413
2413
2414 # Set info (for 'object?') colors
2414 # Set info (for 'object?') colors
2415 if shell.rc.color_info:
2415 if shell.rc.color_info:
2416 try:
2416 try:
2417 shell.inspector.set_active_scheme(new_scheme)
2417 shell.inspector.set_active_scheme(new_scheme)
2418 except:
2418 except:
2419 color_switch_err('object inspector')
2419 color_switch_err('object inspector')
2420 else:
2420 else:
2421 shell.inspector.set_active_scheme('NoColor')
2421 shell.inspector.set_active_scheme('NoColor')
2422
2422
2423 def magic_color_info(self,parameter_s = ''):
2423 def magic_color_info(self,parameter_s = ''):
2424 """Toggle color_info.
2424 """Toggle color_info.
2425
2425
2426 The color_info configuration parameter controls whether colors are
2426 The color_info configuration parameter controls whether colors are
2427 used for displaying object details (by things like %psource, %pfile or
2427 used for displaying object details (by things like %psource, %pfile or
2428 the '?' system). This function toggles this value with each call.
2428 the '?' system). This function toggles this value with each call.
2429
2429
2430 Note that unless you have a fairly recent pager (less works better
2430 Note that unless you have a fairly recent pager (less works better
2431 than more) in your system, using colored object information displays
2431 than more) in your system, using colored object information displays
2432 will not work properly. Test it and see."""
2432 will not work properly. Test it and see."""
2433
2433
2434 self.shell.rc.color_info = 1 - self.shell.rc.color_info
2434 self.shell.rc.color_info = 1 - self.shell.rc.color_info
2435 self.magic_colors(self.shell.rc.colors)
2435 self.magic_colors(self.shell.rc.colors)
2436 print 'Object introspection functions now have coloring:',
2436 print 'Object introspection functions now have coloring:',
2437 print ['OFF','ON'][self.shell.rc.color_info]
2437 print ['OFF','ON'][self.shell.rc.color_info]
2438
2438
2439 def magic_Pprint(self, parameter_s=''):
2439 def magic_Pprint(self, parameter_s=''):
2440 """Toggle pretty printing on/off."""
2440 """Toggle pretty printing on/off."""
2441
2441
2442 self.shell.rc.pprint = 1 - self.shell.rc.pprint
2442 self.shell.rc.pprint = 1 - self.shell.rc.pprint
2443 print 'Pretty printing has been turned', \
2443 print 'Pretty printing has been turned', \
2444 ['OFF','ON'][self.shell.rc.pprint]
2444 ['OFF','ON'][self.shell.rc.pprint]
2445
2445
2446 def magic_exit(self, parameter_s=''):
2446 def magic_exit(self, parameter_s=''):
2447 """Exit IPython, confirming if configured to do so.
2447 """Exit IPython, confirming if configured to do so.
2448
2448
2449 You can configure whether IPython asks for confirmation upon exit by
2449 You can configure whether IPython asks for confirmation upon exit by
2450 setting the confirm_exit flag in the ipythonrc file."""
2450 setting the confirm_exit flag in the ipythonrc file."""
2451
2451
2452 self.shell.exit()
2452 self.shell.exit()
2453
2453
2454 def magic_quit(self, parameter_s=''):
2454 def magic_quit(self, parameter_s=''):
2455 """Exit IPython, confirming if configured to do so (like %exit)"""
2455 """Exit IPython, confirming if configured to do so (like %exit)"""
2456
2456
2457 self.shell.exit()
2457 self.shell.exit()
2458
2458
2459 def magic_Exit(self, parameter_s=''):
2459 def magic_Exit(self, parameter_s=''):
2460 """Exit IPython without confirmation."""
2460 """Exit IPython without confirmation."""
2461
2461
2462 self.shell.exit_now = True
2462 self.shell.exit_now = True
2463
2463
2464 #......................................................................
2464 #......................................................................
2465 # Functions to implement unix shell-type things
2465 # Functions to implement unix shell-type things
2466
2466
2467 def magic_alias(self, parameter_s = ''):
2467 def magic_alias(self, parameter_s = ''):
2468 """Define an alias for a system command.
2468 """Define an alias for a system command.
2469
2469
2470 '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
2470 '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
2471
2471
2472 Then, typing 'alias_name params' will execute the system command 'cmd
2472 Then, typing 'alias_name params' will execute the system command 'cmd
2473 params' (from your underlying operating system).
2473 params' (from your underlying operating system).
2474
2474
2475 Aliases have lower precedence than magic functions and Python normal
2475 Aliases have lower precedence than magic functions and Python normal
2476 variables, so if 'foo' is both a Python variable and an alias, the
2476 variables, so if 'foo' is both a Python variable and an alias, the
2477 alias cannot be executed until 'del foo' removes the Python variable.
2477 alias cannot be executed until 'del foo' removes the Python variable.
2478
2478
2479 You can use the %l specifier in an alias definition to represent the
2479 You can use the %l specifier in an alias definition to represent the
2480 whole line when the alias is called. For example:
2480 whole line when the alias is called. For example:
2481
2481
2482 In [2]: alias all echo "Input in brackets: <%l>"\\
2482 In [2]: alias all echo "Input in brackets: <%l>"\\
2483 In [3]: all hello world\\
2483 In [3]: all hello world\\
2484 Input in brackets: <hello world>
2484 Input in brackets: <hello world>
2485
2485
2486 You can also define aliases with parameters using %s specifiers (one
2486 You can also define aliases with parameters using %s specifiers (one
2487 per parameter):
2487 per parameter):
2488
2488
2489 In [1]: alias parts echo first %s second %s\\
2489 In [1]: alias parts echo first %s second %s\\
2490 In [2]: %parts A B\\
2490 In [2]: %parts A B\\
2491 first A second B\\
2491 first A second B\\
2492 In [3]: %parts A\\
2492 In [3]: %parts A\\
2493 Incorrect number of arguments: 2 expected.\\
2493 Incorrect number of arguments: 2 expected.\\
2494 parts is an alias to: 'echo first %s second %s'
2494 parts is an alias to: 'echo first %s second %s'
2495
2495
2496 Note that %l and %s are mutually exclusive. You can only use one or
2496 Note that %l and %s are mutually exclusive. You can only use one or
2497 the other in your aliases.
2497 the other in your aliases.
2498
2498
2499 Aliases expand Python variables just like system calls using ! or !!
2499 Aliases expand Python variables just like system calls using ! or !!
2500 do: all expressions prefixed with '$' get expanded. For details of
2500 do: all expressions prefixed with '$' get expanded. For details of
2501 the semantic rules, see PEP-215:
2501 the semantic rules, see PEP-215:
2502 http://www.python.org/peps/pep-0215.html. This is the library used by
2502 http://www.python.org/peps/pep-0215.html. This is the library used by
2503 IPython for variable expansion. If you want to access a true shell
2503 IPython for variable expansion. If you want to access a true shell
2504 variable, an extra $ is necessary to prevent its expansion by IPython:
2504 variable, an extra $ is necessary to prevent its expansion by IPython:
2505
2505
2506 In [6]: alias show echo\\
2506 In [6]: alias show echo\\
2507 In [7]: PATH='A Python string'\\
2507 In [7]: PATH='A Python string'\\
2508 In [8]: show $PATH\\
2508 In [8]: show $PATH\\
2509 A Python string\\
2509 A Python string\\
2510 In [9]: show $$PATH\\
2510 In [9]: show $$PATH\\
2511 /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
2511 /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
2512
2512
2513 You can use the alias facility to access all of $PATH. See the %rehash
2513 You can use the alias facility to access all of $PATH. See the %rehash
2514 and %rehashx functions, which automatically create aliases for the
2514 and %rehashx functions, which automatically create aliases for the
2515 contents of your $PATH.
2515 contents of your $PATH.
2516
2516
2517 If called with no parameters, %alias prints the current alias table."""
2517 If called with no parameters, %alias prints the current alias table."""
2518
2518
2519 par = parameter_s.strip()
2519 par = parameter_s.strip()
2520 if not par:
2520 if not par:
2521 stored = self.db.get('stored_aliases', {} )
2521 stored = self.db.get('stored_aliases', {} )
2522 atab = self.shell.alias_table
2522 atab = self.shell.alias_table
2523 aliases = atab.keys()
2523 aliases = atab.keys()
2524 aliases.sort()
2524 aliases.sort()
2525 res = []
2525 res = []
2526 showlast = []
2526 showlast = []
2527 for alias in aliases:
2527 for alias in aliases:
2528 special = False
2528 special = False
2529 try:
2529 try:
2530 tgt = atab[alias][1]
2530 tgt = atab[alias][1]
2531 except (TypeError, AttributeError):
2531 except (TypeError, AttributeError):
2532 # unsubscriptable? probably a callable
2532 # unsubscriptable? probably a callable
2533 tgt = atab[alias]
2533 tgt = atab[alias]
2534 special = True
2534 special = True
2535 # 'interesting' aliases
2535 # 'interesting' aliases
2536 if (alias in stored or
2536 if (alias in stored or
2537 special or
2537 special or
2538 alias.lower() != os.path.splitext(tgt)[0].lower() or
2538 alias.lower() != os.path.splitext(tgt)[0].lower() or
2539 ' ' in tgt):
2539 ' ' in tgt):
2540 showlast.append((alias, tgt))
2540 showlast.append((alias, tgt))
2541 else:
2541 else:
2542 res.append((alias, tgt ))
2542 res.append((alias, tgt ))
2543
2543
2544 # show most interesting aliases last
2544 # show most interesting aliases last
2545 res.extend(showlast)
2545 res.extend(showlast)
2546 print "Total number of aliases:",len(aliases)
2546 print "Total number of aliases:",len(aliases)
2547 return res
2547 return res
2548 try:
2548 try:
2549 alias,cmd = par.split(None,1)
2549 alias,cmd = par.split(None,1)
2550 except:
2550 except:
2551 print OInspect.getdoc(self.magic_alias)
2551 print OInspect.getdoc(self.magic_alias)
2552 else:
2552 else:
2553 nargs = cmd.count('%s')
2553 nargs = cmd.count('%s')
2554 if nargs>0 and cmd.find('%l')>=0:
2554 if nargs>0 and cmd.find('%l')>=0:
2555 error('The %s and %l specifiers are mutually exclusive '
2555 error('The %s and %l specifiers are mutually exclusive '
2556 'in alias definitions.')
2556 'in alias definitions.')
2557 else: # all looks OK
2557 else: # all looks OK
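# each alias is stored as (nargs, command); e.g. the 'parts' alias from the
# docstring above is stored as (2, 'echo first %s second %s')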
2558 self.shell.alias_table[alias] = (nargs,cmd)
2558 self.shell.alias_table[alias] = (nargs,cmd)
2559 self.shell.alias_table_validate(verbose=0)
2559 self.shell.alias_table_validate(verbose=0)
2560 # end magic_alias
2560 # end magic_alias
2561
2561
2562 def magic_unalias(self, parameter_s = ''):
2562 def magic_unalias(self, parameter_s = ''):
2563 """Remove an alias"""
2563 """Remove an alias"""
2564
2564
2565 aname = parameter_s.strip()
2565 aname = parameter_s.strip()
2566 if aname in self.shell.alias_table:
2566 if aname in self.shell.alias_table:
2567 del self.shell.alias_table[aname]
2567 del self.shell.alias_table[aname]
2568 stored = self.db.get('stored_aliases', {} )
2568 stored = self.db.get('stored_aliases', {} )
2569 if aname in stored:
2569 if aname in stored:
2570 print "Removing %stored alias",aname
2570 print "Removing %stored alias",aname
2571 del stored[aname]
2571 del stored[aname]
2572 self.db['stored_aliases'] = stored
2572 self.db['stored_aliases'] = stored
2573
2573
2574
2574
2575 def magic_rehashx(self, parameter_s = ''):
2575 def magic_rehashx(self, parameter_s = ''):
2576 """Update the alias table with all executable files in $PATH.
2576 """Update the alias table with all executable files in $PATH.
2577
2577
2578 This version explicitly checks that every entry in $PATH is a file
2578 This version explicitly checks that every entry in $PATH is a file
2579 with execute access (os.X_OK), so it is much slower than %rehash.
2579 with execute access (os.X_OK), so it is much slower than %rehash.
2580
2580
2581 Under Windows, it checks executability as a match against a
2581 Under Windows, it checks executability as a match against a
2582 '|'-separated string of extensions, stored in the IPython config
2582 '|'-separated string of extensions, stored in the IPython config
2583 variable win_exec_ext. This defaults to 'exe|com|bat'.
2583 variable win_exec_ext. This defaults to 'exe|com|bat'.
2584
2584
2585 This function also resets the root module cache of module completer,
2585 This function also resets the root module cache of module completer,
2586 used on slow filesystems.
2586 used on slow filesystems.
2587 """
2587 """
2588
2588
2589
2589
2590 ip = self.api
2590 ip = self.api
2591
2591
2592 # for the benefit of module completer in ipy_completers.py
2592 # for the benefit of module completer in ipy_completers.py
2593 del ip.db['rootmodules']
2593 del ip.db['rootmodules']
2594
2594
2595 path = [os.path.abspath(os.path.expanduser(p)) for p in
2595 path = [os.path.abspath(os.path.expanduser(p)) for p in
2596 os.environ.get('PATH','').split(os.pathsep)]
2596 os.environ.get('PATH','').split(os.pathsep)]
2597 path = filter(os.path.isdir,path)
2597 path = filter(os.path.isdir,path)
2598
2598
2599 alias_table = self.shell.alias_table
2599 alias_table = self.shell.alias_table
2600 syscmdlist = []
2600 syscmdlist = []
2601 if os.name == 'posix':
2601 if os.name == 'posix':
2602 isexec = lambda fname:os.path.isfile(fname) and \
2602 isexec = lambda fname:os.path.isfile(fname) and \
2603 os.access(fname,os.X_OK)
2603 os.access(fname,os.X_OK)
2604 else:
2604 else:
2605
2605
2606 try:
2606 try:
2607 winext = os.environ['pathext'].replace(';','|').replace('.','')
2607 winext = os.environ['pathext'].replace(';','|').replace('.','')
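# e.g. PATHEXT='.COM;.EXE;.BAT' becomes 'COM|EXE|BAT', ready for the regex below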
2608 except KeyError:
2608 except KeyError:
2609 winext = 'exe|com|bat|py'
2609 winext = 'exe|com|bat|py'
2610 if 'py' not in winext:
2610 if 'py' not in winext:
2611 winext += '|py'
2611 winext += '|py'
2612 execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
2612 execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
2613 isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
2613 isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
2614 savedir = os.getcwd()
2614 savedir = os.getcwd()
2615 try:
2615 try:
2616 # write the whole loop for posix/Windows so we don't have an if in
2616 # write the whole loop for posix/Windows so we don't have an if in
2617 # the innermost part
2617 # the innermost part
2618 if os.name == 'posix':
2618 if os.name == 'posix':
2619 for pdir in path:
2619 for pdir in path:
2620 os.chdir(pdir)
2620 os.chdir(pdir)
2621 for ff in os.listdir(pdir):
2621 for ff in os.listdir(pdir):
2622 if isexec(ff) and ff not in self.shell.no_alias:
2622 if isexec(ff) and ff not in self.shell.no_alias:
2623 # each entry in the alias table must be (N,name),
2623 # each entry in the alias table must be (N,name),
2624 # where N is the number of positional arguments of the
2624 # where N is the number of positional arguments of the
2625 # alias.
2625 # alias.
2626 alias_table[ff] = (0,ff)
2626 alias_table[ff] = (0,ff)
2627 syscmdlist.append(ff)
2627 syscmdlist.append(ff)
2628 else:
2628 else:
2629 for pdir in path:
2629 for pdir in path:
2630 os.chdir(pdir)
2630 os.chdir(pdir)
2631 for ff in os.listdir(pdir):
2631 for ff in os.listdir(pdir):
2632 base, ext = os.path.splitext(ff)
2632 base, ext = os.path.splitext(ff)
2633 if isexec(ff) and base.lower() not in self.shell.no_alias:
2633 if isexec(ff) and base.lower() not in self.shell.no_alias:
2634 if ext.lower() == '.exe':
2634 if ext.lower() == '.exe':
2635 ff = base
2635 ff = base
2636 alias_table[base.lower()] = (0,ff)
2636 alias_table[base.lower()] = (0,ff)
2637 syscmdlist.append(ff)
2637 syscmdlist.append(ff)
2638 # Make sure the alias table doesn't contain keywords or builtins
2638 # Make sure the alias table doesn't contain keywords or builtins
2639 self.shell.alias_table_validate()
2639 self.shell.alias_table_validate()
2640 # Call again init_auto_alias() so we get 'rm -i' and other
2640 # Call again init_auto_alias() so we get 'rm -i' and other
2641 # modified aliases since %rehashx will probably clobber them
2641 # modified aliases since %rehashx will probably clobber them
2642
2642
2643 # no, we don't want them. if %rehashx clobbers them, good,
2643 # no, we don't want them. if %rehashx clobbers them, good,
2644 # we'll probably get better versions
2644 # we'll probably get better versions
2645 # self.shell.init_auto_alias()
2645 # self.shell.init_auto_alias()
2646 db = ip.db
2646 db = ip.db
2647 db['syscmdlist'] = syscmdlist
2647 db['syscmdlist'] = syscmdlist
2648 finally:
2648 finally:
2649 os.chdir(savedir)
2649 os.chdir(savedir)
2650
2650
2651 def magic_pwd(self, parameter_s = ''):
2651 def magic_pwd(self, parameter_s = ''):
2652 """Return the current working directory path."""
2652 """Return the current working directory path."""
2653 return os.getcwd()
2653 return os.getcwd()
2654
2654
2655 def magic_cd(self, parameter_s=''):
2655 def magic_cd(self, parameter_s=''):
2656 """Change the current working directory.
2656 """Change the current working directory.
2657
2657
2658 This command automatically maintains an internal list of directories
2658 This command automatically maintains an internal list of directories
2659 you visit during your IPython session, in the variable _dh. The
2659 you visit during your IPython session, in the variable _dh. The
2660 command %dhist shows this history nicely formatted. You can also
2660 command %dhist shows this history nicely formatted. You can also
2661 do 'cd -<tab>' to see directory history conveniently.
2661 do 'cd -<tab>' to see directory history conveniently.
2662
2662
2663 Usage:
2663 Usage:
2664
2664
2665 cd 'dir': changes to directory 'dir'.
2665 cd 'dir': changes to directory 'dir'.
2666
2666
2667 cd -: changes to the last visited directory.
2667 cd -: changes to the last visited directory.
2668
2668
2669 cd -<n>: changes to the n-th directory in the directory history.
2669 cd -<n>: changes to the n-th directory in the directory history.
2670
2670
2671 cd -b <bookmark_name>: jump to a bookmark set by %bookmark
2671 cd -b <bookmark_name>: jump to a bookmark set by %bookmark
2672 (note: cd <bookmark_name> is enough if there is no
2672 (note: cd <bookmark_name> is enough if there is no
2673 directory <bookmark_name>, but a bookmark with that name exists.)
2673 directory <bookmark_name>, but a bookmark with that name exists.)
2674 'cd -b <tab>' allows you to tab-complete bookmark names.
2674 'cd -b <tab>' allows you to tab-complete bookmark names.
2675
2675
2676 Options:
2676 Options:
2677
2677
2678 -q: quiet. Do not print the working directory after the cd command is
2678 -q: quiet. Do not print the working directory after the cd command is
2679 executed. By default IPython's cd command does print this directory,
2679 executed. By default IPython's cd command does print this directory,
2680 since the default prompts do not display path information.
2680 since the default prompts do not display path information.
2681
2681
2682 Note that !cd doesn't work for this purpose because the shell where
2682 Note that !cd doesn't work for this purpose because the shell where
2683 !command runs is immediately discarded after executing 'command'."""
2683 !command runs is immediately discarded after executing 'command'."""
2684
2684
2685 parameter_s = parameter_s.strip()
2685 parameter_s = parameter_s.strip()
2686 #bkms = self.shell.persist.get("bookmarks",{})
2686 #bkms = self.shell.persist.get("bookmarks",{})
2687
2687
2688 oldcwd = os.getcwd()
2688 oldcwd = os.getcwd()
2689 numcd = re.match(r'(-)(\d+)$',parameter_s)
2689 numcd = re.match(r'(-)(\d+)$',parameter_s)
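# matches 'cd -<n>' forms such as 'cd -3'; group(2) is the index into _dh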
2690 # jump in directory history by number
2690 # jump in directory history by number
2691 if numcd:
2691 if numcd:
2692 nn = int(numcd.group(2))
2692 nn = int(numcd.group(2))
2693 try:
2693 try:
2694 ps = self.shell.user_ns['_dh'][nn]
2694 ps = self.shell.user_ns['_dh'][nn]
2695 except IndexError:
2695 except IndexError:
2696 print 'The requested directory does not exist in history.'
2696 print 'The requested directory does not exist in history.'
2697 return
2697 return
2698 else:
2698 else:
2699 opts = {}
2699 opts = {}
2700 else:
2700 else:
2701 #turn all non-space-escaping backslashes to slashes,
2701 #turn all non-space-escaping backslashes to slashes,
2702 # for c:\windows\directory\names\
2702 # for c:\windows\directory\names\
2703 parameter_s = re.sub(r'\\(?! )','/', parameter_s)
2703 parameter_s = re.sub(r'\\(?! )','/', parameter_s)
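# e.g. 'c:\tools\my\ dir' -> 'c:/tools/my\ dir' (backslash-space escapes are kept)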
2704 opts,ps = self.parse_options(parameter_s,'qb',mode='string')
2704 opts,ps = self.parse_options(parameter_s,'qb',mode='string')
2705 # jump to previous
2705 # jump to previous
2706 if ps == '-':
2706 if ps == '-':
2707 try:
2707 try:
2708 ps = self.shell.user_ns['_dh'][-2]
2708 ps = self.shell.user_ns['_dh'][-2]
2709 except IndexError:
2709 except IndexError:
2710 raise UsageError('%cd -: No previous directory to change to.')
2710 raise UsageError('%cd -: No previous directory to change to.')
2711 # jump to bookmark if needed
2711 # jump to bookmark if needed
2712 else:
2712 else:
2713 if not os.path.isdir(ps) or opts.has_key('b'):
2713 if not os.path.isdir(ps) or opts.has_key('b'):
2714 bkms = self.db.get('bookmarks', {})
2714 bkms = self.db.get('bookmarks', {})
2715
2715
2716 if bkms.has_key(ps):
2716 if bkms.has_key(ps):
2717 target = bkms[ps]
2717 target = bkms[ps]
2718 print '(bookmark:%s) -> %s' % (ps,target)
2718 print '(bookmark:%s) -> %s' % (ps,target)
2719 ps = target
2719 ps = target
2720 else:
2720 else:
2721 if opts.has_key('b'):
2721 if opts.has_key('b'):
2722 raise UsageError("Bookmark '%s' not found. "
2722 raise UsageError("Bookmark '%s' not found. "
2723 "Use '%%bookmark -l' to see your bookmarks." % ps)
2723 "Use '%%bookmark -l' to see your bookmarks." % ps)
2724
2724
2725 # at this point ps should point to the target dir
2725 # at this point ps should point to the target dir
2726 if ps:
2726 if ps:
2727 try:
2727 try:
2728 os.chdir(os.path.expanduser(ps))
2728 os.chdir(os.path.expanduser(ps))
2729 if self.shell.rc.term_title:
2729 if self.shell.rc.term_title:
2730 #print 'set term title:',self.shell.rc.term_title # dbg
2730 #print 'set term title:',self.shell.rc.term_title # dbg
2731 ttitle = 'IPy ' + abbrev_cwd()
2731 platutils.set_term_title('IPy ' + abbrev_cwd())
2732 platutils.set_term_title(ttitle)
2733 except OSError:
2732 except OSError:
2734 print sys.exc_info()[1]
2733 print sys.exc_info()[1]
2735 else:
2734 else:
2736 cwd = os.getcwd()
2735 cwd = os.getcwd()
2737 dhist = self.shell.user_ns['_dh']
2736 dhist = self.shell.user_ns['_dh']
2738 if oldcwd != cwd:
2737 if oldcwd != cwd:
2739 dhist.append(cwd)
2738 dhist.append(cwd)
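# persist only the 100 most recent (compressed) directory history entries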
2740 self.db['dhist'] = compress_dhist(dhist)[-100:]
2739 self.db['dhist'] = compress_dhist(dhist)[-100:]
2741
2740
2742 else:
2741 else:
2743 os.chdir(self.shell.home_dir)
2742 os.chdir(self.shell.home_dir)
2744 if self.shell.rc.term_title:
2743 if self.shell.rc.term_title:
2745 platutils.set_term_title("IPy ~")
2744 platutils.set_term_title("IPy ~")
2746 cwd = os.getcwd()
2745 cwd = os.getcwd()
2747 dhist = self.shell.user_ns['_dh']
2746 dhist = self.shell.user_ns['_dh']
2748
2747
2749 if oldcwd != cwd:
2748 if oldcwd != cwd:
2750 dhist.append(cwd)
2749 dhist.append(cwd)
2751 self.db['dhist'] = compress_dhist(dhist)[-100:]
2750 self.db['dhist'] = compress_dhist(dhist)[-100:]
2752 if not 'q' in opts and self.shell.user_ns['_dh']:
2751 if not 'q' in opts and self.shell.user_ns['_dh']:
2753 print self.shell.user_ns['_dh'][-1]
2752 print self.shell.user_ns['_dh'][-1]
2754
2753
2755
2754
2756 def magic_env(self, parameter_s=''):
2755 def magic_env(self, parameter_s=''):
2757 """List environment variables."""
2756 """List environment variables."""
2758
2757
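# os.environ is a UserDict-style wrapper in Python 2; .data is the plain dict behind it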
2759 return os.environ.data
2758 return os.environ.data
2760
2759
2761 def magic_pushd(self, parameter_s=''):
2760 def magic_pushd(self, parameter_s=''):
2762 """Place the current dir on stack and change directory.
2761 """Place the current dir on stack and change directory.
2763
2762
2764 Usage:\\
2763 Usage:\\
2765 %pushd ['dirname']
2764 %pushd ['dirname']
2766 """
2765 """
2767
2766
2768 dir_s = self.shell.dir_stack
2767 dir_s = self.shell.dir_stack
2769 tgt = os.path.expanduser(parameter_s)
2768 tgt = os.path.expanduser(parameter_s)
2770 cwd = os.getcwd().replace(self.home_dir,'~')
2769 cwd = os.getcwd().replace(self.home_dir,'~')
2771 if tgt:
2770 if tgt:
2772 self.magic_cd(parameter_s)
2771 self.magic_cd(parameter_s)
2773 dir_s.insert(0,cwd)
2772 dir_s.insert(0,cwd)
2774 return self.magic_dirs()
2773 return self.magic_dirs()
2775
2774
2776 def magic_popd(self, parameter_s=''):
2775 def magic_popd(self, parameter_s=''):
2777 """Change to directory popped off the top of the stack.
2776 """Change to directory popped off the top of the stack.
2778 """
2777 """
2779 if not self.shell.dir_stack:
2778 if not self.shell.dir_stack:
2780 raise UsageError("%popd on empty stack")
2779 raise UsageError("%popd on empty stack")
2781 top = self.shell.dir_stack.pop(0)
2780 top = self.shell.dir_stack.pop(0)
2782 self.magic_cd(top)
2781 self.magic_cd(top)
2783 print "popd ->",top
2782 print "popd ->",top
2784
2783
2785 def magic_dirs(self, parameter_s=''):
2784 def magic_dirs(self, parameter_s=''):
2786 """Return the current directory stack."""
2785 """Return the current directory stack."""
2787
2786
2788 return self.shell.dir_stack
2787 return self.shell.dir_stack
2789
2788
2790 def magic_dhist(self, parameter_s=''):
2789 def magic_dhist(self, parameter_s=''):
2791 """Print your history of visited directories.
2790 """Print your history of visited directories.
2792
2791
2793 %dhist -> print full history\\
2792 %dhist -> print full history\\
2794 %dhist n -> print last n entries only\\
2793 %dhist n -> print last n entries only\\
2795 %dhist n1 n2 -> print entries between n1 and n2 (n1 not included)\\
2794 %dhist n1 n2 -> print entries between n1 and n2 (n1 not included)\\
2796
2795
2797 This history is automatically maintained by the %cd command, and
2796 This history is automatically maintained by the %cd command, and
2798 always available as the global list variable _dh. You can use %cd -<n>
2797 always available as the global list variable _dh. You can use %cd -<n>
2799 to go to directory number <n>.
2798 to go to directory number <n>.
2800
2799
2801 Note that most of time, you should view directory history by entering
2800 Note that most of time, you should view directory history by entering
2802 cd -<TAB>.
2801 cd -<TAB>.
2803
2802
2804 """
2803 """
2805
2804
2806 dh = self.shell.user_ns['_dh']
2805 dh = self.shell.user_ns['_dh']
2807 if parameter_s:
2806 if parameter_s:
2808 try:
2807 try:
2809 args = map(int,parameter_s.split())
2808 args = map(int,parameter_s.split())
2810 except:
2809 except:
2811 self.arg_err(Magic.magic_dhist)
2810 self.arg_err(Magic.magic_dhist)
2812 return
2811 return
2813 if len(args) == 1:
2812 if len(args) == 1:
2814 ini,fin = max(len(dh)-(args[0]),0),len(dh)
2813 ini,fin = max(len(dh)-(args[0]),0),len(dh)
2815 elif len(args) == 2:
2814 elif len(args) == 2:
2816 ini,fin = args
2815 ini,fin = args
2817 else:
2816 else:
2818 self.arg_err(Magic.magic_dhist)
2817 self.arg_err(Magic.magic_dhist)
2819 return
2818 return
2820 else:
2819 else:
2821 ini,fin = 0,len(dh)
2820 ini,fin = 0,len(dh)
2822 nlprint(dh,
2821 nlprint(dh,
2823 header = 'Directory history (kept in _dh)',
2822 header = 'Directory history (kept in _dh)',
2824 start=ini,stop=fin)
2823 start=ini,stop=fin)
2825
2824
2826
2825
2827 def magic_sc(self, parameter_s=''):
2826 def magic_sc(self, parameter_s=''):
2828 """Shell capture - execute a shell command and capture its output.
2827 """Shell capture - execute a shell command and capture its output.
2829
2828
2830 DEPRECATED. Suboptimal, retained for backwards compatibility.
2829 DEPRECATED. Suboptimal, retained for backwards compatibility.
2831
2830
2832 You should use the form 'var = !command' instead. Example:
2831 You should use the form 'var = !command' instead. Example:
2833
2832
2834 "%sc -l myfiles = ls ~" should now be written as
2833 "%sc -l myfiles = ls ~" should now be written as
2835
2834
2836 "myfiles = !ls ~"
2835 "myfiles = !ls ~"
2837
2836
2838 myfiles.s, myfiles.l and myfiles.n still apply as documented
2837 myfiles.s, myfiles.l and myfiles.n still apply as documented
2839 below.
2838 below.
2840
2839
2841 --
2840 --
2842 %sc [options] varname=command
2841 %sc [options] varname=command
2843
2842
2844 IPython will run the given command using commands.getoutput(), and
2843 IPython will run the given command using commands.getoutput(), and
2845 will then update the user's interactive namespace with a variable
2844 will then update the user's interactive namespace with a variable
2846 called varname, containing the value of the call. Your command can
2845 called varname, containing the value of the call. Your command can
2847 contain shell wildcards, pipes, etc.
2846 contain shell wildcards, pipes, etc.
2848
2847
2849 The '=' sign in the syntax is mandatory, and the variable name you
2848 The '=' sign in the syntax is mandatory, and the variable name you
2850 supply must follow Python's standard conventions for valid names.
2849 supply must follow Python's standard conventions for valid names.
2851
2850
2852 (A special format without variable name exists for internal use)
2851 (A special format without variable name exists for internal use)
2853
2852
2854 Options:
2853 Options:
2855
2854
2856 -l: list output. Split the output on newlines into a list before
2855 -l: list output. Split the output on newlines into a list before
2857 assigning it to the given variable. By default the output is stored
2856 assigning it to the given variable. By default the output is stored
2858 as a single string.
2857 as a single string.
2859
2858
2860 -v: verbose. Print the contents of the variable.
2859 -v: verbose. Print the contents of the variable.
2861
2860
2862 In most cases you should not need to split as a list, because the
2861 In most cases you should not need to split as a list, because the
2863 returned value is a special type of string which can automatically
2862 returned value is a special type of string which can automatically
2864 provide its contents either as a list (split on newlines) or as a
2863 provide its contents either as a list (split on newlines) or as a
2865 space-separated string. These are convenient, respectively, either
2864 space-separated string. These are convenient, respectively, either
2866 for sequential processing or to be passed to a shell command.
2865 for sequential processing or to be passed to a shell command.
2867
2866
2868 For example:
2867 For example:
2869
2868
2870 # Capture into variable a
2869 # Capture into variable a
2871 In [9]: sc a=ls *py
2870 In [9]: sc a=ls *py
2872
2871
2873 # a is a string with embedded newlines
2872 # a is a string with embedded newlines
2874 In [10]: a
2873 In [10]: a
2875 Out[10]: 'setup.py\nwin32_manual_post_install.py'
2874 Out[10]: 'setup.py\nwin32_manual_post_install.py'
2876
2875
2877 # which can be seen as a list:
2876 # which can be seen as a list:
2878 In [11]: a.l
2877 In [11]: a.l
2879 Out[11]: ['setup.py', 'win32_manual_post_install.py']
2878 Out[11]: ['setup.py', 'win32_manual_post_install.py']
2880
2879
2881 # or as a whitespace-separated string:
2880 # or as a whitespace-separated string:
2882 In [12]: a.s
2881 In [12]: a.s
2883 Out[12]: 'setup.py win32_manual_post_install.py'
2882 Out[12]: 'setup.py win32_manual_post_install.py'
2884
2883
2885 # a.s is useful to pass as a single command line:
2884 # a.s is useful to pass as a single command line:
2886 In [13]: !wc -l $a.s
2885 In [13]: !wc -l $a.s
2887 146 setup.py
2886 146 setup.py
2888 130 win32_manual_post_install.py
2887 130 win32_manual_post_install.py
2889 276 total
2888 276 total
2890
2889
2891 # while the list form is useful to loop over:
2890 # while the list form is useful to loop over:
2892 In [14]: for f in a.l:
2891 In [14]: for f in a.l:
2893 ....: !wc -l $f
2892 ....: !wc -l $f
2894 ....:
2893 ....:
2895 146 setup.py
2894 146 setup.py
2896 130 win32_manual_post_install.py
2895 130 win32_manual_post_install.py
2897
2896
2898 Similarly, the lists returned by the -l option are also special, in
2897 Similarly, the lists returned by the -l option are also special, in
2899 the sense that you can equally invoke the .s attribute on them to
2898 the sense that you can equally invoke the .s attribute on them to
2900 automatically get a whitespace-separated string from their contents:
2899 automatically get a whitespace-separated string from their contents:
2901
2900
2902 In [1]: sc -l b=ls *py
2901 In [1]: sc -l b=ls *py
2903
2902
2904 In [2]: b
2903 In [2]: b
2905 Out[2]: ['setup.py', 'win32_manual_post_install.py']
2904 Out[2]: ['setup.py', 'win32_manual_post_install.py']
2906
2905
2907 In [3]: b.s
2906 In [3]: b.s
2908 Out[3]: 'setup.py win32_manual_post_install.py'
2907 Out[3]: 'setup.py win32_manual_post_install.py'
2909
2908
2910 In summary, both the lists and strings used for output capture have
2909 In summary, both the lists and strings used for output capture have
2911 the following special attributes:
2910 the following special attributes:
2912
2911
2913 .l (or .list) : value as list.
2912 .l (or .list) : value as list.
2914 .n (or .nlstr): value as newline-separated string.
2913 .n (or .nlstr): value as newline-separated string.
2915 .s (or .spstr): value as space-separated string.
2914 .s (or .spstr): value as space-separated string.
2916 """
2915 """
2917
2916
2918 opts,args = self.parse_options(parameter_s,'lv')
2917 opts,args = self.parse_options(parameter_s,'lv')
2919 # Try to get a variable name and command to run
2918 # Try to get a variable name and command to run
2920 try:
2919 try:
2921 # the variable name must be obtained from the parse_options
2920 # the variable name must be obtained from the parse_options
2922 # output, which uses shlex.split to strip options out.
2921 # output, which uses shlex.split to strip options out.
2923 var,_ = args.split('=',1)
2922 var,_ = args.split('=',1)
2924 var = var.strip()
2923 var = var.strip()
2925 # But the command has to be extracted from the original input
2924 # But the command has to be extracted from the original input
2926 # parameter_s, not on what parse_options returns, to avoid the
2925 # parameter_s, not on what parse_options returns, to avoid the
2927 # quote stripping which shlex.split performs on it.
2926 # quote stripping which shlex.split performs on it.
2928 _,cmd = parameter_s.split('=',1)
2927 _,cmd = parameter_s.split('=',1)
2929 except ValueError:
2928 except ValueError:
2930 var,cmd = '',''
2929 var,cmd = '',''
2931 # If all looks ok, proceed
2930 # If all looks ok, proceed
2932 out,err = self.shell.getoutputerror(cmd)
2931 out,err = self.shell.getoutputerror(cmd)
2933 if err:
2932 if err:
2934 print >> Term.cerr,err
2933 print >> Term.cerr,err
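# -l wraps the output as an SList (list with .s/.n/.l views); otherwise an
# LSString (string offering the same views), as described in the docstring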
2935 if opts.has_key('l'):
2934 if opts.has_key('l'):
2936 out = SList(out.split('\n'))
2935 out = SList(out.split('\n'))
2937 else:
2936 else:
2938 out = LSString(out)
2937 out = LSString(out)
2939 if opts.has_key('v'):
2938 if opts.has_key('v'):
2940 print '%s ==\n%s' % (var,pformat(out))
2939 print '%s ==\n%s' % (var,pformat(out))
2941 if var:
2940 if var:
2942 self.shell.user_ns.update({var:out})
2941 self.shell.user_ns.update({var:out})
2943 else:
2942 else:
2944 return out
2943 return out
2945
2944
2946 def magic_sx(self, parameter_s=''):
2945 def magic_sx(self, parameter_s=''):
2947 """Shell execute - run a shell command and capture its output.
2946 """Shell execute - run a shell command and capture its output.
2948
2947
2949 %sx command
2948 %sx command
2950
2949
2951 IPython will run the given command using commands.getoutput(), and
2950 IPython will run the given command using commands.getoutput(), and
2952 return the result formatted as a list (split on '\\n'). Since the
2951 return the result formatted as a list (split on '\\n'). Since the
2953 output is _returned_, it will be stored in ipython's regular output
2952 output is _returned_, it will be stored in ipython's regular output
2954 cache Out[N] and in the '_N' automatic variables.
2953 cache Out[N] and in the '_N' automatic variables.
2955
2954
2956 Notes:
2955 Notes:
2957
2956
2958 1) If an input line begins with '!!', then %sx is automatically
2957 1) If an input line begins with '!!', then %sx is automatically
2959 invoked. That is, while:
2958 invoked. That is, while:
2960 !ls
2959 !ls
2961 causes ipython to simply issue system('ls'), typing
2960 causes ipython to simply issue system('ls'), typing
2962 !!ls
2961 !!ls
2963 is a shorthand equivalent to:
2962 is a shorthand equivalent to:
2964 %sx ls
2963 %sx ls
2965
2964
2966 2) %sx differs from %sc in that %sx automatically splits into a list,
2965 2) %sx differs from %sc in that %sx automatically splits into a list,
2967 like '%sc -l'. The reason for this is to make it as easy as possible
2966 like '%sc -l'. The reason for this is to make it as easy as possible
2968 to process line-oriented shell output via further python commands.
2967 to process line-oriented shell output via further python commands.
2969 %sc is meant to provide much finer control, but requires more
2968 %sc is meant to provide much finer control, but requires more
2970 typing.
2969 typing.
2971
2970
2972 3) Just like %sc -l, this is a list with special attributes:
2971 3) Just like %sc -l, this is a list with special attributes:
2973
2972
2974 .l (or .list) : value as list.
2973 .l (or .list) : value as list.
2975 .n (or .nlstr): value as newline-separated string.
2974 .n (or .nlstr): value as newline-separated string.
2976 .s (or .spstr): value as whitespace-separated string.
2975 .s (or .spstr): value as whitespace-separated string.
2977
2976
2978 This is very useful when trying to use such lists as arguments to
2977 This is very useful when trying to use such lists as arguments to
2979 system commands."""
2978 system commands."""
2980
2979
2981 if parameter_s:
2980 if parameter_s:
2982 out,err = self.shell.getoutputerror(parameter_s)
2981 out,err = self.shell.getoutputerror(parameter_s)
2983 if err:
2982 if err:
2984 print >> Term.cerr,err
2983 print >> Term.cerr,err
2985 return SList(out.split('\n'))
2984 return SList(out.split('\n'))
2986
2985
2987 def magic_bg(self, parameter_s=''):
2986 def magic_bg(self, parameter_s=''):
2988 """Run a job in the background, in a separate thread.
2987 """Run a job in the background, in a separate thread.
2989
2988
2990 For example,
2989 For example,
2991
2990
2992 %bg myfunc(x,y,z=1)
2991 %bg myfunc(x,y,z=1)
2993
2992
2994 will execute 'myfunc(x,y,z=1)' in a background thread. As soon as the
2993 will execute 'myfunc(x,y,z=1)' in a background thread. As soon as the
2995 execution starts, a message will be printed indicating the job
2994 execution starts, a message will be printed indicating the job
2996 number. If your job number is 5, you can use
2995 number. If your job number is 5, you can use
2997
2996
2998 myvar = jobs.result(5) or myvar = jobs[5].result
2997 myvar = jobs.result(5) or myvar = jobs[5].result
2999
2998
3000 to assign this result to variable 'myvar'.
2999 to assign this result to variable 'myvar'.
3001
3000
3002 IPython has a job manager, accessible via the 'jobs' object. You can
3001 IPython has a job manager, accessible via the 'jobs' object. You can
3003 type jobs? to get more information about it, and use jobs.<TAB> to see
3002 type jobs? to get more information about it, and use jobs.<TAB> to see
3004 its attributes. All attributes not starting with an underscore are
3003 its attributes. All attributes not starting with an underscore are
3005 meant for public use.
3004 meant for public use.
3006
3005
3007 In particular, look at the jobs.new() method, which is used to create
3006 In particular, look at the jobs.new() method, which is used to create
3008 new jobs. This magic %bg function is just a convenience wrapper
3007 new jobs. This magic %bg function is just a convenience wrapper
3009 around jobs.new(), for expression-based jobs. If you want to create a
3008 around jobs.new(), for expression-based jobs. If you want to create a
3010 new job with an explicit function object and arguments, you must call
3009 new job with an explicit function object and arguments, you must call
3011 jobs.new() directly.
3010 jobs.new() directly.
3012
3011
3013 The jobs.new docstring also describes in detail several important
3012 The jobs.new docstring also describes in detail several important
3014 caveats associated with a thread-based model for background job
3013 caveats associated with a thread-based model for background job
3015 execution. Type jobs.new? for details.
3014 execution. Type jobs.new? for details.
3016
3015
3017 You can check the status of all jobs with jobs.status().
3016 You can check the status of all jobs with jobs.status().
3018
3017
3019 The jobs variable is set by IPython into the Python builtin namespace.
3018 The jobs variable is set by IPython into the Python builtin namespace.
3020 If you ever declare a variable named 'jobs', you will shadow this
3019 If you ever declare a variable named 'jobs', you will shadow this
3021 name. You can either delete your global jobs variable to regain
3020 name. You can either delete your global jobs variable to regain
3022 access to the job manager, or make a new name and assign it manually
3021 access to the job manager, or make a new name and assign it manually
3023 to the manager (stored in IPython's namespace). For example, to
3022 to the manager (stored in IPython's namespace). For example, to
3024 assign the job manager to the Jobs name, use:
3023 assign the job manager to the Jobs name, use:
3025
3024
3026 Jobs = __builtins__.jobs"""
3025 Jobs = __builtins__.jobs"""
3027
3026
3028 self.shell.jobs.new(parameter_s,self.shell.user_ns)
3027 self.shell.jobs.new(parameter_s,self.shell.user_ns)
3029
3028
3030 def magic_r(self, parameter_s=''):
3029 def magic_r(self, parameter_s=''):
3031 """Repeat previous input.
3030 """Repeat previous input.
3032
3031
3033 Note: Consider using the more powerful %rep instead!
3032 Note: Consider using the more powerful %rep instead!
3034
3033
3035 If given an argument, repeats the previous command which starts with
3034 If given an argument, repeats the previous command which starts with
3036 the same string, otherwise it just repeats the previous input.
3035 the same string, otherwise it just repeats the previous input.
3037
3036
3038 Shell escaped commands (with ! as first character) are not recognized
3037 Shell escaped commands (with ! as first character) are not recognized
3039 by this system, only pure python code and magic commands.
3038 by this system, only pure python code and magic commands.
3040 """
3039 """
3041
3040
3042 start = parameter_s.strip()
3041 start = parameter_s.strip()
3043 esc_magic = self.shell.ESC_MAGIC
3042 esc_magic = self.shell.ESC_MAGIC
3044 # Identify magic commands even if automagic is on (which means
3043 # Identify magic commands even if automagic is on (which means
3045 # the in-memory version is different from that typed by the user).
3044 # the in-memory version is different from that typed by the user).
3046 if self.shell.rc.automagic:
3045 if self.shell.rc.automagic:
3047 start_magic = esc_magic+start
3046 start_magic = esc_magic+start
3048 else:
3047 else:
3049 start_magic = start
3048 start_magic = start
3050 # Look through the input history in reverse
3049 # Look through the input history in reverse
3051 for n in range(len(self.shell.input_hist)-2,0,-1):
3050 for n in range(len(self.shell.input_hist)-2,0,-1):
3052 input = self.shell.input_hist[n]
3051 input = self.shell.input_hist[n]
3053 # skip plain 'r' lines so we don't recurse to infinity
3052 # skip plain 'r' lines so we don't recurse to infinity
3054 if input != '_ip.magic("r")\n' and \
3053 if input != '_ip.magic("r")\n' and \
3055 (input.startswith(start) or input.startswith(start_magic)):
3054 (input.startswith(start) or input.startswith(start_magic)):
3056 #print 'match',`input` # dbg
3055 #print 'match',`input` # dbg
3057 print 'Executing:',input,
3056 print 'Executing:',input,
3058 self.shell.runlines(input)
3057 self.shell.runlines(input)
3059 return
3058 return
3060 print 'No previous input matching `%s` found.' % start
3059 print 'No previous input matching `%s` found.' % start
3061
3060
3062
3061
3063 def magic_bookmark(self, parameter_s=''):
3062 def magic_bookmark(self, parameter_s=''):
3064 """Manage IPython's bookmark system.
3063 """Manage IPython's bookmark system.
3065
3064
3066 %bookmark <name> - set bookmark to current dir
3065 %bookmark <name> - set bookmark to current dir
3067 %bookmark <name> <dir> - set bookmark to <dir>
3066 %bookmark <name> <dir> - set bookmark to <dir>
3068 %bookmark -l - list all bookmarks
3067 %bookmark -l - list all bookmarks
3069 %bookmark -d <name> - remove bookmark
3068 %bookmark -d <name> - remove bookmark
3070 %bookmark -r - remove all bookmarks
3069 %bookmark -r - remove all bookmarks
3071
3070
3072 You can later on access a bookmarked folder with:
3071 You can later on access a bookmarked folder with:
3073 %cd -b <name>
3072 %cd -b <name>
3074 or simply '%cd <name>' if there is no directory called <name> AND
3073 or simply '%cd <name>' if there is no directory called <name> AND
3075 there is such a bookmark defined.
3074 there is such a bookmark defined.
3076
3075
3077 Your bookmarks persist through IPython sessions, but they are
3076 Your bookmarks persist through IPython sessions, but they are
3078 associated with each profile."""
3077 associated with each profile."""
3079
3078
3080 opts,args = self.parse_options(parameter_s,'drl',mode='list')
3079 opts,args = self.parse_options(parameter_s,'drl',mode='list')
3081 if len(args) > 2:
3080 if len(args) > 2:
3082 raise UsageError("%bookmark: too many arguments")
3081 raise UsageError("%bookmark: too many arguments")
3083
3082
3084 bkms = self.db.get('bookmarks',{})
3083 bkms = self.db.get('bookmarks',{})
3085
3084
3086 if opts.has_key('d'):
3085 if opts.has_key('d'):
3087 try:
3086 try:
3088 todel = args[0]
3087 todel = args[0]
3089 except IndexError:
3088 except IndexError:
3090 raise UsageError(
3089 raise UsageError(
3091 "%bookmark -d: must provide a bookmark to delete")
3090 "%bookmark -d: must provide a bookmark to delete")
3092 else:
3091 else:
3093 try:
3092 try:
3094 del bkms[todel]
3093 del bkms[todel]
3095 except KeyError:
3094 except KeyError:
3096 raise UsageError(
3095 raise UsageError(
3097 "%%bookmark -d: Can't delete bookmark '%s'" % todel)
3096 "%%bookmark -d: Can't delete bookmark '%s'" % todel)
3098
3097
3099 elif opts.has_key('r'):
3098 elif opts.has_key('r'):
3100 bkms = {}
3099 bkms = {}
3101 elif opts.has_key('l'):
3100 elif opts.has_key('l'):
3102 bks = bkms.keys()
3101 bks = bkms.keys()
3103 bks.sort()
3102 bks.sort()
3104 if bks:
3103 if bks:
3105 size = max(map(len,bks))
3104 size = max(map(len,bks))
3106 else:
3105 else:
3107 size = 0
3106 size = 0
3108 fmt = '%-'+str(size)+'s -> %s'
3107 fmt = '%-'+str(size)+'s -> %s'
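# left-align bookmark names to the width of the longest one, e.g. "src   -> /home/user/src"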
3109 print 'Current bookmarks:'
3108 print 'Current bookmarks:'
3110 for bk in bks:
3109 for bk in bks:
3111 print fmt % (bk,bkms[bk])
3110 print fmt % (bk,bkms[bk])
3112 else:
3111 else:
3113 if not args:
3112 if not args:
3114 raise UsageError("%bookmark: You must specify the bookmark name")
3113 raise UsageError("%bookmark: You must specify the bookmark name")
3115 elif len(args)==1:
3114 elif len(args)==1:
3116 bkms[args[0]] = os.getcwd()
3115 bkms[args[0]] = os.getcwd()
3117 elif len(args)==2:
3116 elif len(args)==2:
3118 bkms[args[0]] = args[1]
3117 bkms[args[0]] = args[1]
3119 self.db['bookmarks'] = bkms
3118 self.db['bookmarks'] = bkms
3120
3119
3121 def magic_pycat(self, parameter_s=''):
3120 def magic_pycat(self, parameter_s=''):
3122 """Show a syntax-highlighted file through a pager.
3121 """Show a syntax-highlighted file through a pager.
3123
3122
3124 This magic is similar to the cat utility, but it will assume the file
3123 This magic is similar to the cat utility, but it will assume the file
3125 to be Python source and will show it with syntax highlighting. """
3124 to be Python source and will show it with syntax highlighting. """
3126
3125
3127 try:
3126 try:
3128 filename = get_py_filename(parameter_s)
3127 filename = get_py_filename(parameter_s)
3129 cont = file_read(filename)
3128 cont = file_read(filename)
3130 except IOError:
3129 except IOError:
3131 try:
3130 try:
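# not a readable file: fall back to evaluating the argument as an expression
# (e.g. a string variable in the user namespace that holds source code)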
3132 cont = eval(parameter_s,self.user_ns)
3131 cont = eval(parameter_s,self.user_ns)
3133 except NameError:
3132 except NameError:
3134 cont = None
3133 cont = None
3135 if cont is None:
3134 if cont is None:
3136 print "Error: no such file or variable"
3135 print "Error: no such file or variable"
3137 return
3136 return
3138
3137
3139 page(self.shell.pycolorize(cont),
3138 page(self.shell.pycolorize(cont),
3140 screen_lines=self.shell.rc.screen_length)
3139 screen_lines=self.shell.rc.screen_length)
3141
3140
3142 def magic_cpaste(self, parameter_s=''):
3141 def magic_cpaste(self, parameter_s=''):
3143 """Allows you to paste & execute a pre-formatted code block from clipboard.
3142 """Allows you to paste & execute a pre-formatted code block from clipboard.
3144
3143
3145 You must terminate the block with '--' (two minus-signs) alone on the
3144 You must terminate the block with '--' (two minus-signs) alone on the
3146 line. You can also provide your own sentinel with '%paste -s %%' ('%%'
3145 line. You can also provide your own sentinel with '%paste -s %%' ('%%'
3147 is the new sentinel for this operation)
3146 is the new sentinel for this operation)
3148
3147
3149 The block is dedented prior to execution to enable execution of method
3148 The block is dedented prior to execution to enable execution of method
3150 definitions. '>' and '+' characters at the beginning of a line are
3149 definitions. '>' and '+' characters at the beginning of a line are
3151 ignored, to allow pasting directly from e-mails, diff files and
3150 ignored, to allow pasting directly from e-mails, diff files and
3152 doctests (the '...' continuation prompt is also stripped). The
3151 doctests (the '...' continuation prompt is also stripped). The
3153 executed block is also assigned to variable named 'pasted_block' for
3152 executed block is also assigned to variable named 'pasted_block' for
3154 later editing with '%edit pasted_block'.
3153 later editing with '%edit pasted_block'.
3155
3154
3156 You can also pass a variable name as an argument, e.g. '%cpaste foo'.
3155 You can also pass a variable name as an argument, e.g. '%cpaste foo'.
3157 This assigns the pasted block to variable 'foo' as a string, without
3156 This assigns the pasted block to variable 'foo' as a string, without
3158 dedenting or executing it (preceding >>> and + is still stripped)
3157 dedenting or executing it (preceding >>> and + is still stripped)
3159
3158
3160 Do not be alarmed by garbled output on Windows (it's a readline bug).
3159 Do not be alarmed by garbled output on Windows (it's a readline bug).
3161 Just press enter and type -- (and press enter again) and the block
3160 Just press enter and type -- (and press enter again) and the block
3162 will be what was just pasted.
3161 will be what was just pasted.
3163
3162
3164 IPython statements (magics, shell escapes) are not supported (yet).
3163 IPython statements (magics, shell escapes) are not supported (yet).
3165 """
3164 """
3166 opts,args = self.parse_options(parameter_s,'s:',mode='string')
3165 opts,args = self.parse_options(parameter_s,'s:',mode='string')
3167 par = args.strip()
3166 par = args.strip()
3168 sentinel = opts.get('s','--')
3167 sentinel = opts.get('s','--')
3169
3168
3170 # Regular expressions that declare text we strip from the input:
3169 # Regular expressions that declare text we strip from the input:
3171 strip_re = [r'^\s*In \[\d+\]:', # IPython input prompt
3170 strip_re = [r'^\s*In \[\d+\]:', # IPython input prompt
3172 r'^\s*(\s?>)+', # Python input prompt
3171 r'^\s*(\s?>)+', # Python input prompt
3173 r'^\s*\.{3,}', # Continuation prompts
3172 r'^\s*\.{3,}', # Continuation prompts
3174 r'^\++',
3173 r'^\++',
3175 ]
3174 ]
3176
3175
3177 strip_from_start = map(re.compile,strip_re)
3176 strip_from_start = map(re.compile,strip_re)
3178
3177
3179 from IPython import iplib
3178 from IPython import iplib
3180 lines = []
3179 lines = []
3181 print "Pasting code; enter '%s' alone on the line to stop." % sentinel
3180 print "Pasting code; enter '%s' alone on the line to stop." % sentinel
3182 while 1:
3181 while 1:
3183 l = iplib.raw_input_original(':')
3182 l = iplib.raw_input_original(':')
3184 if l ==sentinel:
3183 if l ==sentinel:
3185 break
3184 break
3186
3185
3187 for pat in strip_from_start:
3186 for pat in strip_from_start:
3188 l = pat.sub('',l)
3187 l = pat.sub('',l)
3189 lines.append(l)
3188 lines.append(l)
3190
3189
3191 block = "\n".join(lines) + '\n'
3190 block = "\n".join(lines) + '\n'
3192 #print "block:\n",block
3191 #print "block:\n",block
3193 if not par:
3192 if not par:
3194 b = textwrap.dedent(block)
3193 b = textwrap.dedent(block)
3195 exec b in self.user_ns
3194 exec b in self.user_ns
3196 self.user_ns['pasted_block'] = b
3195 self.user_ns['pasted_block'] = b
3197 else:
3196 else:
3198 self.user_ns[par] = block
3197 self.user_ns[par] = SList(block.splitlines())
3199 print "Block assigned to '%s'" % par
3198 print "Block assigned to '%s'" % par
3200
3199
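As a rough illustration of what %cpaste does with a pasted block, here is a standalone sketch using only the standard library; the regular expressions mirror the strip_re list above, and the sample pasted text is made up for the example.

import re, textwrap

strip_re = [r'^\s*In \[\d+\]:',   # IPython input prompt
            r'^\s*(\s?>)+',       # Python prompt / e-mail quoting
            r'^\s*\.{3,}',        # continuation prompts
            r'^\++']              # '+' markers from diffs
strip_from_start = map(re.compile, strip_re)

pasted = """\
    >>> def greet(name):
    ...     return 'hello ' + name
"""

lines = []
for l in pasted.splitlines():
    for pat in strip_from_start:
        l = pat.sub('', l)
    lines.append(l)

b = textwrap.dedent("\n".join(lines) + '\n')
exec b                      # defines greet() in the current namespace
print greet('world')        # -> hello world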
3201 def magic_quickref(self,arg):
3200 def magic_quickref(self,arg):
3202 """ Show a quick reference sheet """
3201 """ Show a quick reference sheet """
3203 import IPython.usage
3202 import IPython.usage
3204 qr = IPython.usage.quick_reference + self.magic_magic('-brief')
3203 qr = IPython.usage.quick_reference + self.magic_magic('-brief')
3205
3204
3206 page(qr)
3205 page(qr)
3207
3206
3208 def magic_upgrade(self,arg):
3207 def magic_upgrade(self,arg):
3209 """ Upgrade your IPython installation
3208 """ Upgrade your IPython installation
3210
3209
3211 This will copy the config files that don't yet exist in your
3210 This will copy the config files that don't yet exist in your
3212 ipython dir from the system config dir. Use this after upgrading
3211 ipython dir from the system config dir. Use this after upgrading
3213 IPython if you don't wish to delete your .ipython dir.
3212 IPython if you don't wish to delete your .ipython dir.
3214
3213
3215 Call with -nolegacy to get rid of ipythonrc* files (recommended for
3214 Call with -nolegacy to get rid of ipythonrc* files (recommended for
3216 new users)
3215 new users)
3217
3216
3218 """
3217 """
3219 ip = self.getapi()
3218 ip = self.getapi()
3220 ipinstallation = path(IPython.__file__).dirname()
3219 ipinstallation = path(IPython.__file__).dirname()
3221 upgrade_script = '%s "%s"' % (sys.executable,ipinstallation / 'upgrade_dir.py')
3220 upgrade_script = '%s "%s"' % (sys.executable,ipinstallation / 'upgrade_dir.py')
3222 src_config = ipinstallation / 'UserConfig'
3221 src_config = ipinstallation / 'UserConfig'
3223 userdir = path(ip.options.ipythondir)
3222 userdir = path(ip.options.ipythondir)
3224 cmd = '%s "%s" "%s"' % (upgrade_script, src_config, userdir)
3223 cmd = '%s "%s" "%s"' % (upgrade_script, src_config, userdir)
3225 print ">",cmd
3224 print ">",cmd
3226 shell(cmd)
3225 shell(cmd)
3227 if arg == '-nolegacy':
3226 if arg == '-nolegacy':
3228 legacy = userdir.files('ipythonrc*')
3227 legacy = userdir.files('ipythonrc*')
3229 print "Nuking legacy files:",legacy
3228 print "Nuking legacy files:",legacy
3230
3229
3231 [p.remove() for p in legacy]
3230 [p.remove() for p in legacy]
3232 suffix = (sys.platform == 'win32' and '.ini' or '')
3231 suffix = (sys.platform == 'win32' and '.ini' or '')
3233 (userdir / ('ipythonrc' + suffix)).write_text('# Empty, see ipy_user_conf.py\n')
3232 (userdir / ('ipythonrc' + suffix)).write_text('# Empty, see ipy_user_conf.py\n')
3234
3233
3235
3234
3236 def magic_doctest_mode(self,parameter_s=''):
3235 def magic_doctest_mode(self,parameter_s=''):
3237 """Toggle doctest mode on and off.
3236 """Toggle doctest mode on and off.
3238
3237
3239 This mode allows you to toggle the prompt behavior between normal
3238 This mode allows you to toggle the prompt behavior between normal
3240 IPython prompts and ones that are as similar to the default Python
3239 IPython prompts and ones that are as similar to the default Python
3241 interpreter as possible.
3240 interpreter as possible.
3242
3241
3243 It also supports the pasting of code snippets that have leading '>>>'
3242 It also supports the pasting of code snippets that have leading '>>>'
3244 and '...' prompts in them. This means that you can paste doctests from
3243 and '...' prompts in them. This means that you can paste doctests from
3245 files or docstrings (even if they have leading whitespace), and the
3244 files or docstrings (even if they have leading whitespace), and the
3246 code will execute correctly. You can then use '%history -tn' to see
3245 code will execute correctly. You can then use '%history -tn' to see
3247 the translated history without line numbers; this will give you the
3246 the translated history without line numbers; this will give you the
3248 input after removal of all the leading prompts and whitespace, which
3247 input after removal of all the leading prompts and whitespace, which
3249 can be pasted back into an editor.
3248 can be pasted back into an editor.
3250
3249
3251 With these features, you can switch into this mode easily whenever you
3250 With these features, you can switch into this mode easily whenever you
3252 need to do testing and changes to doctests, without having to leave
3251 need to do testing and changes to doctests, without having to leave
3253 your existing IPython session.
3252 your existing IPython session.
3254 """
3253 """
3255
3254
3256 # XXX - Fix this to have cleaner activate/deactivate calls.
3255 # XXX - Fix this to have cleaner activate/deactivate calls.
3257 from IPython.Extensions import InterpreterPasteInput as ipaste
3256 from IPython.Extensions import InterpreterPasteInput as ipaste
3258 from IPython.ipstruct import Struct
3257 from IPython.ipstruct import Struct
3259
3258
3260 # Shorthands
3259 # Shorthands
3261 shell = self.shell
3260 shell = self.shell
3262 oc = shell.outputcache
3261 oc = shell.outputcache
3263 rc = shell.rc
3262 rc = shell.rc
3264 meta = shell.meta
3263 meta = shell.meta
3265 # dstore is a data store kept in the instance metadata bag to track any
3264 # dstore is a data store kept in the instance metadata bag to track any
3266 # changes we make, so we can undo them later.
3265 # changes we make, so we can undo them later.
3267 dstore = meta.setdefault('doctest_mode',Struct())
3266 dstore = meta.setdefault('doctest_mode',Struct())
3268 save_dstore = dstore.setdefault
3267 save_dstore = dstore.setdefault
3269
3268
3270 # save a few values we'll need to recover later
3269 # save a few values we'll need to recover later
3271 mode = save_dstore('mode',False)
3270 mode = save_dstore('mode',False)
3272 save_dstore('rc_pprint',rc.pprint)
3271 save_dstore('rc_pprint',rc.pprint)
3273 save_dstore('xmode',shell.InteractiveTB.mode)
3272 save_dstore('xmode',shell.InteractiveTB.mode)
3274 save_dstore('rc_separate_out',rc.separate_out)
3273 save_dstore('rc_separate_out',rc.separate_out)
3275 save_dstore('rc_separate_out2',rc.separate_out2)
3274 save_dstore('rc_separate_out2',rc.separate_out2)
3276 save_dstore('rc_prompts_pad_left',rc.prompts_pad_left)
3275 save_dstore('rc_prompts_pad_left',rc.prompts_pad_left)
3277
3276
3278 if mode == False:
3277 if mode == False:
3279 # turn on
3278 # turn on
3280 ipaste.activate_prefilter()
3279 ipaste.activate_prefilter()
3281
3280
3282 oc.prompt1.p_template = '>>> '
3281 oc.prompt1.p_template = '>>> '
3283 oc.prompt2.p_template = '... '
3282 oc.prompt2.p_template = '... '
3284 oc.prompt_out.p_template = ''
3283 oc.prompt_out.p_template = ''
3285
3284
3286 oc.output_sep = ''
3285 oc.output_sep = ''
3287 oc.output_sep2 = ''
3286 oc.output_sep2 = ''
3288
3287
3289 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3288 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3290 oc.prompt_out.pad_left = False
3289 oc.prompt_out.pad_left = False
3291
3290
3292 rc.pprint = False
3291 rc.pprint = False
3293
3292
3294 shell.magic_xmode('Plain')
3293 shell.magic_xmode('Plain')
3295
3294
3296 else:
3295 else:
3297 # turn off
3296 # turn off
3298 ipaste.deactivate_prefilter()
3297 ipaste.deactivate_prefilter()
3299
3298
3300 oc.prompt1.p_template = rc.prompt_in1
3299 oc.prompt1.p_template = rc.prompt_in1
3301 oc.prompt2.p_template = rc.prompt_in2
3300 oc.prompt2.p_template = rc.prompt_in2
3302 oc.prompt_out.p_template = rc.prompt_out
3301 oc.prompt_out.p_template = rc.prompt_out
3303
3302
3304 oc.output_sep = dstore.rc_separate_out
3303 oc.output_sep = dstore.rc_separate_out
3305 oc.output_sep2 = dstore.rc_separate_out2
3304 oc.output_sep2 = dstore.rc_separate_out2
3306
3305
3307 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3306 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3308 oc.prompt_out.pad_left = dstore.rc_prompts_pad_left
3307 oc.prompt_out.pad_left = dstore.rc_prompts_pad_left
3309
3308
3310 rc.pprint = dstore.rc_pprint
3309 rc.pprint = dstore.rc_pprint
3311
3310
3312 shell.magic_xmode(dstore.xmode)
3311 shell.magic_xmode(dstore.xmode)
3313
3312
3314 # Store new mode and inform
3313 # Store new mode and inform
3315 dstore.mode = bool(1-int(mode))
3314 dstore.mode = bool(1-int(mode))
3316 print 'Doctest mode is:',
3315 print 'Doctest mode is:',
3317 print ['OFF','ON'][dstore.mode]
3316 print ['OFF','ON'][dstore.mode]
3318
3317
3319 # end Magic
3318 # end Magic
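The state-saving pattern used by %doctest_mode above (record the original setting only the first time, via dict.setdefault, then restore it when toggling back) is worth seeing in isolation. The Settings class and field names below are invented for the sketch; only the idiom matches the code above.

class Settings(object):
    pprint = True            # stand-in for the live configuration
    xmode = 'Context'

meta = {}                    # per-session metadata bag

def toggle_doctest_mode(rc):
    dstore = meta.setdefault('doctest_mode', {})
    save = dstore.setdefault          # stores a value only on first call
    mode = save('mode', False)
    save('rc_pprint', rc.pprint)      # remembers the *original* values
    save('xmode', rc.xmode)

    if not mode:                      # turn on: doctest-friendly settings
        rc.pprint = False
        rc.xmode = 'Plain'
    else:                             # turn off: restore what was saved
        rc.pprint = dstore['rc_pprint']
        rc.xmode = dstore['xmode']

    dstore['mode'] = not mode
    print 'Doctest mode is:', ['OFF', 'ON'][dstore['mode']]

rc = Settings()
toggle_doctest_mode(rc)   # ON  (pprint off, plain tracebacks)
toggle_doctest_mode(rc)   # OFF (original pprint/xmode restored)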
@@ -1,95 +1,99 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Release data for the IPython project.
2 """Release data for the IPython project.
3
3
4 $Id: Release.py 3002 2008-02-01 07:17:00Z fperez $"""
4 $Id: Release.py 3002 2008-02-01 07:17:00Z fperez $"""
5
5
6 #*****************************************************************************
6 #*****************************************************************************
7 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
7 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
8 #
8 #
9 # Copyright (c) 2001 Janko Hauser <jhauser@zscout.de> and Nathaniel Gray
9 # Copyright (c) 2001 Janko Hauser <jhauser@zscout.de> and Nathaniel Gray
10 # <n8gray@caltech.edu>
10 # <n8gray@caltech.edu>
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #*****************************************************************************
14 #*****************************************************************************
15
15
16 # Name of the package for release purposes. This is the name which labels
16 # Name of the package for release purposes. This is the name which labels
17 # the tarballs and RPMs made by distutils, so it's best to lowercase it.
17 # the tarballs and RPMs made by distutils, so it's best to lowercase it.
18 name = 'ipython'
18 name = 'ipython'
19
19
20 # For versions with substrings (like 0.6.16.svn), use an extra . to separate
20 # For versions with substrings (like 0.6.16.svn), use an extra . to separate
21 # the new substring. We have to avoid using either dashes or underscores,
21 # the new substring. We have to avoid using either dashes or underscores,
22 # because bdist_rpm does not accept dashes (an RPM convention), and
22 # because bdist_rpm does not accept dashes (an RPM convention), and
23 # bdist_deb does not accept underscores (a Debian convention).
23 # bdist_deb does not accept underscores (a Debian convention).
24
24
25 revision = '1016'
25 development = True # change this to False to do a release
26 version_base = '0.9.0'
26 branch = 'ipython'
27 branch = 'ipython'
28 revision = '1016'
27
29
30 if development:
28 if branch == 'ipython':
31 if branch == 'ipython':
29 version = '0.9.0.bzr.r' + revision
32 version = '%s.bzr.r%s' % (version_base, revision)
33 else:
34 version = '%s.bzr.r%s.%s' % (version_base, revision, branch)
30 else:
35 else:
31 version = '0.9.0.bzr.r%s.%s' % (revision,branch)
36 version = version_base
32
37
33 # version = '0.8.4'
34
38
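A quick way to see what the new version logic above produces: the first three values are the ones in this revision, and the alternate branch name is made up for the example.

development = True
version_base = '0.9.0'
revision = '1016'

for branch in ('ipython', 'some-branch'):        # second name is hypothetical
    if development:
        if branch == 'ipython':
            version = '%s.bzr.r%s' % (version_base, revision)
        else:
            version = '%s.bzr.r%s.%s' % (version_base, revision, branch)
    else:
        version = version_base
    print branch, '->', version
# ipython     -> 0.9.0.bzr.r1016
# some-branch -> 0.9.0.bzr.r1016.some-branch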
35 description = "Tools for interactive development in Python."
39 description = "Tools for interactive development in Python."
36
40
37 long_description = \
41 long_description = \
38 """
42 """
39 IPython provides a replacement for the interactive Python interpreter with
43 IPython provides a replacement for the interactive Python interpreter with
40 extra functionality.
44 extra functionality.
41
45
42 Main features:
46 Main features:
43
47
44 * Comprehensive object introspection.
48 * Comprehensive object introspection.
45
49
46 * Input history, persistent across sessions.
50 * Input history, persistent across sessions.
47
51
48 * Caching of output results during a session with automatically generated
52 * Caching of output results during a session with automatically generated
49 references.
53 references.
50
54
51 * Readline based name completion.
55 * Readline based name completion.
52
56
53 * Extensible system of 'magic' commands for controlling the environment and
57 * Extensible system of 'magic' commands for controlling the environment and
54 performing many tasks related either to IPython or the operating system.
58 performing many tasks related either to IPython or the operating system.
55
59
56 * Configuration system with easy switching between different setups (simpler
60 * Configuration system with easy switching between different setups (simpler
57 than changing $PYTHONSTARTUP environment variables every time).
61 than changing $PYTHONSTARTUP environment variables every time).
58
62
59 * Session logging and reloading.
63 * Session logging and reloading.
60
64
61 * Extensible syntax processing for special purpose situations.
65 * Extensible syntax processing for special purpose situations.
62
66
63 * Access to the system shell with user-extensible alias system.
67 * Access to the system shell with user-extensible alias system.
64
68
65 * Easily embeddable in other Python programs.
69 * Easily embeddable in other Python programs.
66
70
67 * Integrated access to the pdb debugger and the Python profiler.
71 * Integrated access to the pdb debugger and the Python profiler.
68
72
69 The latest development version is always available at the IPython subversion
73 The latest development version is always available at the IPython subversion
70 repository_.
74 repository_.
71
75
72 .. _repository: http://ipython.scipy.org/svn/ipython/ipython/trunk#egg=ipython-dev
76 .. _repository: http://ipython.scipy.org/svn/ipython/ipython/trunk#egg=ipython-dev
73 """
77 """
74
78
75 license = 'BSD'
79 license = 'BSD'
76
80
77 authors = {'Fernando' : ('Fernando Perez','fperez@colorado.edu'),
81 authors = {'Fernando' : ('Fernando Perez','fperez@colorado.edu'),
78 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
82 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
79 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
83 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
80 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
84 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
81 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
85 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
82 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com')
86 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com')
83 }
87 }
84
88
85 author = 'The IPython Development Team'
89 author = 'The IPython Development Team'
86
90
87 author_email = 'ipython-dev@scipy.org'
91 author_email = 'ipython-dev@scipy.org'
88
92
89 url = 'http://ipython.scipy.org'
93 url = 'http://ipython.scipy.org'
90
94
91 download_url = 'http://ipython.scipy.org/dist'
95 download_url = 'http://ipython.scipy.org/dist'
92
96
93 platforms = ['Linux','Mac OSX','Windows XP/2000/NT','Windows 95/98/ME']
97 platforms = ['Linux','Mac OSX','Windows XP/2000/NT','Windows 95/98/ME']
94
98
95 keywords = ['Interactive','Interpreter','Shell','Parallel','Distributed']
99 keywords = ['Interactive','Interpreter','Shell','Parallel','Distributed']
@@ -1,203 +1,231 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 r""" mglob - enhanced file list expansion module
3 r""" mglob - enhanced file list expansion module
4
4
5 Use as stand-alone utility (for xargs, `backticks` etc.),
5 Use as stand-alone utility (for xargs, `backticks` etc.),
6 or as a globbing library for your own Python programs. Globbing the sys.argv is something
6 or as a globbing library for your own Python programs. Globbing the sys.argv is something
7 that almost every Windows script has to perform manually, and this module is here
7 that almost every Windows script has to perform manually, and this module is here
8 to help with that task. Also Unix users will benefit from enhanced modes
8 to help with that task. Also Unix users will benefit from enhanced modes
9 such as recursion, exclusion, directory omission...
9 such as recursion, exclusion, directory omission...
10
10
11 Unlike glob.glob, directories are not included in the glob unless specified
11 Unlike glob.glob, directories are not included in the glob unless specified
12 with 'dir:'
12 with 'dir:'
13
13
14 'expand' is the function to use in python programs. Typical use
14 'expand' is the function to use in python programs. Typical use
15 to expand argv (esp. in windows)::
15 to expand argv (esp. in windows)::
16
16
17 try:
17 try:
18 import mglob
18 import mglob
19 files = mglob.expand(sys.argv[1:])
19 files = mglob.expand(sys.argv[1:])
20 except ImportError:
20 except ImportError:
21 print "mglob not found; try 'easy_install mglob' for extra features"
21 print "mglob not found; try 'easy_install mglob' for extra features"
22 files = sys.argv[1:]
22 files = sys.argv[1:]
23
23
24 Note that on Unix, the shell expands *normal* wildcards (*.cpp, etc.) in argv.
24 Note that on Unix, the shell expands *normal* wildcards (*.cpp, etc.) in argv.
25 Therefore, you might want to use quotes with normal wildcards to prevent this
25 Therefore, you might want to use quotes with normal wildcards to prevent this
26 expansion, in order for mglob to see the wildcards and get the wanted behaviour.
26 expansion, in order for mglob to see the wildcards and get the wanted behaviour.
27 Not quoting the wildcards is harmless and typically has equivalent results, though.
27 Not quoting the wildcards is harmless and typically has equivalent results, though.
28
28
29 Author: Ville Vainio <vivainio@gmail.com>
29 Author: Ville Vainio <vivainio@gmail.com>
30 License: MIT Open Source license
30 License: MIT Open Source license
31
31
32 """
32 """
33
33
34 # Assigned to a variable for "usage" printing convenience
34 # Assigned to a variable for "usage" printing convenience
35
35
36 globsyntax = """\
36 globsyntax = """\
37 This program allows specifying filenames with the "mglob" mechanism.
37 This program allows specifying filenames with the "mglob" mechanism.
38 Supported syntax in globs (wildcard matching patterns)::
38 Supported syntax in globs (wildcard matching patterns)::
39
39
40 *.cpp ?ellowo*
40 *.cpp ?ellowo*
41 - obvious. Differs from normal glob in that dirs are not included.
41 - obvious. Differs from normal glob in that dirs are not included.
42 Unix users might want to write this as: "*.cpp" "?ellowo*"
42 Unix users might want to write this as: "*.cpp" "?ellowo*"
43 rec:/usr/share=*.txt,*.doc
43 rec:/usr/share=*.txt,*.doc
44 - get all *.txt and *.doc under /usr/share,
44 - get all *.txt and *.doc under /usr/share,
45 recursively
45 recursively
46 rec:/usr/share
46 rec:/usr/share
47 - All files under /usr/share, recursively
47 - All files under /usr/share, recursively
48 rec:*.py
48 rec:*.py
49 - All .py files under current working dir, recursively
49 - All .py files under current working dir, recursively
50 foo
50 foo
51 - File or dir foo
51 - File or dir foo
52 !*.bak readme*
52 !*.bak readme*
53 - readme*, exclude files ending with .bak
53 - readme*, exclude files ending with .bak
54 !.svn/ !.hg/ !*_Data/ rec:.
54 !.svn/ !.hg/ !*_Data/ rec:.
55 - Skip .svn, .hg, foo_Data dirs (and their subdirs) in recurse.
55 - Skip .svn, .hg, foo_Data dirs (and their subdirs) in recurse.
56 Trailing / is the key, \ does not work!
56 Trailing / is the key, \ does not work! Use !.*/ for all hidden.
57 dir:foo
57 dir:foo
58 - the directory foo if it exists (not files in foo)
58 - the directory foo if it exists (not files in foo)
59 dir:*
59 dir:*
60 - all directories in current folder
60 - all directories in current folder
61 foo.py bar.* !h* rec:*.py
61 foo.py bar.* !h* rec:*.py
62 - Obvious. !h* exclusion only applies for rec:*.py.
62 - Obvious. !h* exclusion only applies for rec:*.py.
63 foo.py is *not* included twice.
63 foo.py is *not* included twice.
64 @filelist.txt
64 @filelist.txt
65 - All files listed in 'filelist.txt' file, on separate lines.
65 - All files listed in 'filelist.txt' file, on separate lines.
66 "cont:class \wak:" rec:*.py
67 - Match files whose contents match the regexp. Applies to subsequent files.
68 Note the quotes, needed because of the whitespace.
66 """
69 """
67
70
68
71
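Assuming the mglob module above is importable, the syntax it documents (including the 'cont:' content filter added in this revision) could be exercised roughly like this; the patterns and paths are only an illustration.

import mglob

# .py files under the current tree whose text matches the regexp
hits = mglob.expand(r'"cont:class \w+Magic" rec:*.py')
for f in hits:
    print f

# recurse but skip version-control directories entirely
clean = mglob.expand('!.svn/ !.hg/ rec:.')

# directories only
dirs = mglob.expand('dir:*')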
69 __version__ = "0.2"
72 __version__ = "0.2"
70
73
71
74
72 import os,glob,fnmatch,sys
75 import os,glob,fnmatch,sys,re
73 from sets import Set as set
76 from sets import Set as set
74
77
75
78
76 def expand(flist,exp_dirs = False):
79 def expand(flist,exp_dirs = False):
77 """ Expand the glob(s) in flist.
80 """ Expand the glob(s) in flist.
78
81
79 flist may be either a whitespace-separated list of globs/files
82 flist may be either a whitespace-separated list of globs/files
80 or an array of globs/files.
83 or an array of globs/files.
81
84
82 if exp_dirs is true, directory names in glob are expanded to the files
85 if exp_dirs is true, directory names in glob are expanded to the files
83 contained in them - otherwise, directory names are returned as is.
86 contained in them - otherwise, directory names are returned as is.
84
87
85 """
88 """
86 if isinstance(flist, basestring):
89 if isinstance(flist, basestring):
87 flist = flist.split()
90 import shlex
91 flist = shlex.split(flist)
88 done_set = set()
92 done_set = set()
89 denied_set = set()
93 denied_set = set()
94 cont_set = set()
95 cur_rejected_dirs = set()
90
96
91 def recfind(p, pats = ["*"]):
97 def recfind(p, pats = ["*"]):
92 denied_dirs = ["*" + d+"*" for d in denied_set if d.endswith("/")]
98 denied_dirs = [os.path.dirname(d) for d in denied_set if d.endswith("/")]
93 #print "de", denied_dirs
94 for (dp,dnames,fnames) in os.walk(p):
99 for (dp,dnames,fnames) in os.walk(p):
95 # see if we should ignore the whole directory
100 # see if we should ignore the whole directory
96 dp_norm = dp.replace("\\","/") + "/"
101 dp_norm = dp.replace("\\","/") + "/"
97 deny = False
102 deny = False
103 # do not traverse under already rejected dirs
104 for d in cur_rejected_dirs:
105 if dp.startswith(d):
106 deny = True
107 break
108 if deny:
109 continue
110
111
98 #print "dp",dp
112 #print "dp",dp
113 bname = os.path.basename(dp)
99 for deny_pat in denied_dirs:
114 for deny_pat in denied_dirs:
100 if fnmatch.fnmatch( dp_norm, deny_pat):
115 if fnmatch.fnmatch( bname, deny_pat):
101 deny = True
116 deny = True
117 cur_rejected_dirs.add(dp)
102 break
118 break
103 if deny:
119 if deny:
104 continue
120 continue
105
121
106
122
107 for f in fnames:
123 for f in fnames:
108 matched = False
124 matched = False
109 for p in pats:
125 for p in pats:
110 if fnmatch.fnmatch(f,p):
126 if fnmatch.fnmatch(f,p):
111 matched = True
127 matched = True
112 break
128 break
113 if matched:
129 if matched:
114 yield os.path.join(dp,f)
130 yield os.path.join(dp,f)
115
131
116 def once_filter(seq):
132 def once_filter(seq):
117 for it in seq:
133 for it in seq:
118 p = os.path.abspath(it)
134 p = os.path.abspath(it)
119 if p in done_set:
135 if p in done_set:
120 continue
136 continue
121 done_set.add(p)
137 done_set.add(p)
122 deny = False
138 deny = False
123 for deny_pat in denied_set:
139 for deny_pat in denied_set:
124 if fnmatch.fnmatch(os.path.basename(p), deny_pat):
140 if fnmatch.fnmatch(os.path.basename(p), deny_pat):
125 deny = True
141 deny = True
126 break
142 break
143 if cont_set:
144 try:
145 cont = open(p).read()
146 except IOError:
147 # deny
148 continue
149 for pat in cont_set:
150 if not re.search(pat,cont, re.IGNORECASE):
151 deny = True
152 break
153
127 if not deny:
154 if not deny:
128 yield it
155 yield it
129 return
156 return
130
157
131 res = []
158 res = []
132
159
133 for ent in flist:
160 for ent in flist:
134 ent = os.path.expanduser(os.path.expandvars(ent))
161 ent = os.path.expanduser(os.path.expandvars(ent))
135 if ent.lower().startswith('rec:'):
162 if ent.lower().startswith('rec:'):
136 fields = ent[4:].split('=')
163 fields = ent[4:].split('=')
137 if len(fields) == 2:
164 if len(fields) == 2:
138 pth, patlist = fields
165 pth, patlist = fields
139 elif len(fields) == 1:
166 elif len(fields) == 1:
140 if os.path.isdir(fields[0]):
167 if os.path.isdir(fields[0]):
141 # single arg is dir
168 # single arg is dir
142 pth, patlist = fields[0], '*'
169 pth, patlist = fields[0], '*'
143 else:
170 else:
144 # single arg is pattern
171 # single arg is pattern
145 pth, patlist = '.', fields[0]
172 pth, patlist = '.', fields[0]
146
173
147 elif len(fields) == 0:
174 elif len(fields) == 0:
148 pth, pathlist = '.','*'
175 pth, pathlist = '.','*'
149
176
150 pats = patlist.split(',')
177 pats = patlist.split(',')
151 res.extend(once_filter(recfind(pth, pats)))
178 res.extend(once_filter(recfind(pth, pats)))
152 # filelist
179 # filelist
153 elif ent.startswith('@') and os.path.isfile(ent[1:]):
180 elif ent.startswith('@') and os.path.isfile(ent[1:]):
154 res.extend(once_filter(open(ent[1:]).read().splitlines()))
181 res.extend(once_filter(open(ent[1:]).read().splitlines()))
155 # exclusion
182 # exclusion
156 elif ent.startswith('!'):
183 elif ent.startswith('!'):
157 denied_set.add(ent[1:])
184 denied_set.add(ent[1:])
158 # glob only dirs
185 # glob only dirs
159 elif ent.lower().startswith('dir:'):
186 elif ent.lower().startswith('dir:'):
160 res.extend(once_filter(filter(os.path.isdir,glob.glob(ent[4:]))))
187 res.extend(once_filter(filter(os.path.isdir,glob.glob(ent[4:]))))
161
188 elif ent.lower().startswith('cont:'):
189 cont_set.add(ent[5:])
162 # get all files in the specified dir
190 # get all files in the specified dir
163 elif os.path.isdir(ent) and exp_dirs:
191 elif os.path.isdir(ent) and exp_dirs:
164 res.extend(once_filter(filter(os.path.isfile,glob.glob(ent + os.sep+"*"))))
192 res.extend(once_filter(filter(os.path.isfile,glob.glob(ent + os.sep+"*"))))
165
193
166 # glob only files
194 # glob only files
167
195
168 elif '*' in ent or '?' in ent:
196 elif '*' in ent or '?' in ent:
169 res.extend(once_filter(filter(os.path.isfile,glob.glob(ent))))
197 res.extend(once_filter(filter(os.path.isfile,glob.glob(ent))))
170
198
171 else:
199 else:
172 res.extend(once_filter([ent]))
200 res.extend(once_filter([ent]))
173 return res
201 return res
174
202
175
203
176 def test():
204 def test():
177 assert (
205 assert (
178 expand("*.py ~/.ipython/*.py rec:/usr/share/doc-base") ==
206 expand("*.py ~/.ipython/*.py rec:/usr/share/doc-base") ==
179 expand( ['*.py', '~/.ipython/*.py', 'rec:/usr/share/doc-base'] )
207 expand( ['*.py', '~/.ipython/*.py', 'rec:/usr/share/doc-base'] )
180 )
208 )
181
209
182 def main():
210 def main():
183 if len(sys.argv) < 2:
211 if len(sys.argv) < 2:
184 print globsyntax
212 print globsyntax
185 return
213 return
186
214
187 print "\n".join(expand(sys.argv[1:])),
215 print "\n".join(expand(sys.argv[1:])),
188
216
189 def mglob_f(self, arg):
217 def mglob_f(self, arg):
190 from IPython.genutils import SList
218 from IPython.genutils import SList
191 if arg.strip():
219 if arg.strip():
192 return SList(expand(arg))
220 return SList(expand(arg))
193 print "Please specify pattern!"
221 print "Please specify pattern!"
194 print globsyntax
222 print globsyntax
195
223
196 def init_ipython(ip):
224 def init_ipython(ip):
197 """ register %mglob for IPython """
225 """ register %mglob for IPython """
198 mglob_f.__doc__ = globsyntax
226 mglob_f.__doc__ = globsyntax
199 ip.expose_magic("mglob",mglob_f)
227 ip.expose_magic("mglob",mglob_f)
200
228
201 # test()
229 # test()
202 if __name__ == "__main__":
230 if __name__ == "__main__":
203 main()
231 main()
@@ -1,2097 +1,2124 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 General purpose utilities.
3 General purpose utilities.
4
4
5 This is a grab-bag of stuff I find useful in most programs I write. Some of
5 This is a grab-bag of stuff I find useful in most programs I write. Some of
6 these things are also convenient when working at the command line.
6 these things are also convenient when working at the command line.
7
7
8 $Id: genutils.py 2998 2008-01-31 10:06:04Z vivainio $"""
8 $Id: genutils.py 2998 2008-01-31 10:06:04Z vivainio $"""
9
9
10 #*****************************************************************************
10 #*****************************************************************************
11 # Copyright (C) 2001-2006 Fernando Perez. <fperez@colorado.edu>
11 # Copyright (C) 2001-2006 Fernando Perez. <fperez@colorado.edu>
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #*****************************************************************************
15 #*****************************************************************************
16
16
17 from IPython import Release
17 from IPython import Release
18 __author__ = '%s <%s>' % Release.authors['Fernando']
18 __author__ = '%s <%s>' % Release.authors['Fernando']
19 __license__ = Release.license
19 __license__ = Release.license
20
20
21 #****************************************************************************
21 #****************************************************************************
22 # required modules from the Python standard library
22 # required modules from the Python standard library
23 import __main__
23 import __main__
24 import commands
24 import commands
25 try:
25 try:
26 import doctest
26 import doctest
27 except ImportError:
27 except ImportError:
28 pass
28 pass
29 import os
29 import os
30 import platform
30 import platform
31 import re
31 import re
32 import shlex
32 import shlex
33 import shutil
33 import shutil
34 import subprocess
34 import subprocess
35 import sys
35 import sys
36 import tempfile
36 import tempfile
37 import time
37 import time
38 import types
38 import types
39 import warnings
39 import warnings
40
40
41 # Curses and termios are Unix-only modules
41 # Curses and termios are Unix-only modules
42 try:
42 try:
43 import curses
43 import curses
44 # We need termios as well, so if its import happens to raise, we bail on
44 # We need termios as well, so if its import happens to raise, we bail on
45 # using curses altogether.
45 # using curses altogether.
46 import termios
46 import termios
47 except ImportError:
47 except ImportError:
48 USE_CURSES = False
48 USE_CURSES = False
49 else:
49 else:
50 # Curses on Solaris may not be complete, so we can't use it there
50 # Curses on Solaris may not be complete, so we can't use it there
51 USE_CURSES = hasattr(curses,'initscr')
51 USE_CURSES = hasattr(curses,'initscr')
52
52
53 # Other IPython utilities
53 # Other IPython utilities
54 import IPython
54 import IPython
55 from IPython.Itpl import Itpl,itpl,printpl
55 from IPython.Itpl import Itpl,itpl,printpl
56 from IPython import DPyGetOpt, platutils
56 from IPython import DPyGetOpt, platutils
57 from IPython.generics import result_display
57 from IPython.generics import result_display
58 import IPython.ipapi
58 import IPython.ipapi
59 from IPython.external.path import path
59 from IPython.external.path import path
60 if os.name == "nt":
60 if os.name == "nt":
61 from IPython.winconsole import get_console_size
61 from IPython.winconsole import get_console_size
62
62
63 try:
63 try:
64 set
64 set
65 except:
65 except:
66 from sets import Set as set
66 from sets import Set as set
67
67
68
68
69 #****************************************************************************
69 #****************************************************************************
70 # Exceptions
70 # Exceptions
71 class Error(Exception):
71 class Error(Exception):
72 """Base class for exceptions in this module."""
72 """Base class for exceptions in this module."""
73 pass
73 pass
74
74
75 #----------------------------------------------------------------------------
75 #----------------------------------------------------------------------------
76 class IOStream:
76 class IOStream:
77 def __init__(self,stream,fallback):
77 def __init__(self,stream,fallback):
78 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
78 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
79 stream = fallback
79 stream = fallback
80 self.stream = stream
80 self.stream = stream
81 self._swrite = stream.write
81 self._swrite = stream.write
82 self.flush = stream.flush
82 self.flush = stream.flush
83
83
84 def write(self,data):
84 def write(self,data):
85 try:
85 try:
86 self._swrite(data)
86 self._swrite(data)
87 except:
87 except:
88 try:
88 try:
89 # print handles some unicode issues which may trip a plain
89 # print handles some unicode issues which may trip a plain
90 # write() call. Attempt to emulate write() by using a
90 # write() call. Attempt to emulate write() by using a
91 # trailing comma
91 # trailing comma
92 print >> self.stream, data,
92 print >> self.stream, data,
93 except:
93 except:
94 # if we get here, something is seriously broken.
94 # if we get here, something is seriously broken.
95 print >> sys.stderr, \
95 print >> sys.stderr, \
96 'ERROR - failed to write data to stream:', self.stream
96 'ERROR - failed to write data to stream:', self.stream
97
97
98 def close(self):
98 def close(self):
99 pass
99 pass
100
100
101
101
102 class IOTerm:
102 class IOTerm:
103 """ Term holds the file or file-like objects for handling I/O operations.
103 """ Term holds the file or file-like objects for handling I/O operations.
104
104
105 These are normally just sys.stdin, sys.stdout and sys.stderr but for
105 These are normally just sys.stdin, sys.stdout and sys.stderr but for
106 Windows they can be replaced to allow editing the strings before they are
106 Windows they can be replaced to allow editing the strings before they are
107 displayed."""
107 displayed."""
108
108
109 # In the future, having IPython channel all its I/O operations through
109 # In the future, having IPython channel all its I/O operations through
110 # this class will make it easier to embed it into other environments which
110 # this class will make it easier to embed it into other environments which
111 # are not a normal terminal (such as a GUI-based shell)
111 # are not a normal terminal (such as a GUI-based shell)
112 def __init__(self,cin=None,cout=None,cerr=None):
112 def __init__(self,cin=None,cout=None,cerr=None):
113 self.cin = IOStream(cin,sys.stdin)
113 self.cin = IOStream(cin,sys.stdin)
114 self.cout = IOStream(cout,sys.stdout)
114 self.cout = IOStream(cout,sys.stdout)
115 self.cerr = IOStream(cerr,sys.stderr)
115 self.cerr = IOStream(cerr,sys.stderr)
116
116
117 # Global variable to be used for all I/O
117 # Global variable to be used for all I/O
118 Term = IOTerm()
118 Term = IOTerm()
119
119
120 import IPython.rlineimpl as readline
120 import IPython.rlineimpl as readline
121 # Remake Term to use the readline i/o facilities
121 # Remake Term to use the readline i/o facilities
122 if sys.platform == 'win32' and readline.have_readline:
122 if sys.platform == 'win32' and readline.have_readline:
123
123
124 Term = IOTerm(cout=readline._outputfile,cerr=readline._outputfile)
124 Term = IOTerm(cout=readline._outputfile,cerr=readline._outputfile)
125
125
126
126
127 #****************************************************************************
127 #****************************************************************************
128 # Generic warning/error printer, used by everything else
128 # Generic warning/error printer, used by everything else
129 def warn(msg,level=2,exit_val=1):
129 def warn(msg,level=2,exit_val=1):
130 """Standard warning printer. Gives formatting consistency.
130 """Standard warning printer. Gives formatting consistency.
131
131
132 Output is sent to Term.cerr (sys.stderr by default).
132 Output is sent to Term.cerr (sys.stderr by default).
133
133
134 Options:
134 Options:
135
135
136 -level(2): allows finer control:
136 -level(2): allows finer control:
137 0 -> Do nothing, dummy function.
137 0 -> Do nothing, dummy function.
138 1 -> Print message.
138 1 -> Print message.
139 2 -> Print 'WARNING:' + message. (Default level).
139 2 -> Print 'WARNING:' + message. (Default level).
140 3 -> Print 'ERROR:' + message.
140 3 -> Print 'ERROR:' + message.
141 4 -> Print 'FATAL ERROR:' + message and trigger a sys.exit(exit_val).
141 4 -> Print 'FATAL ERROR:' + message and trigger a sys.exit(exit_val).
142
142
143 -exit_val (1): exit value returned by sys.exit() for a level 4
143 -exit_val (1): exit value returned by sys.exit() for a level 4
144 warning. Ignored for all other levels."""
144 warning. Ignored for all other levels."""
145
145
146 if level>0:
146 if level>0:
147 header = ['','','WARNING: ','ERROR: ','FATAL ERROR: ']
147 header = ['','','WARNING: ','ERROR: ','FATAL ERROR: ']
148 print >> Term.cerr, '%s%s' % (header[level],msg)
148 print >> Term.cerr, '%s%s' % (header[level],msg)
149 if level == 4:
149 if level == 4:
150 print >> Term.cerr,'Exiting.\n'
150 print >> Term.cerr,'Exiting.\n'
151 sys.exit(exit_val)
151 sys.exit(exit_val)
152
152
153 def info(msg):
153 def info(msg):
154 """Equivalent to warn(msg,level=1)."""
154 """Equivalent to warn(msg,level=1)."""
155
155
156 warn(msg,level=1)
156 warn(msg,level=1)
157
157
158 def error(msg):
158 def error(msg):
159 """Equivalent to warn(msg,level=3)."""
159 """Equivalent to warn(msg,level=3)."""
160
160
161 warn(msg,level=3)
161 warn(msg,level=3)
162
162
163 def fatal(msg,exit_val=1):
163 def fatal(msg,exit_val=1):
164 """Equivalent to warn(msg,exit_val=exit_val,level=4)."""
164 """Equivalent to warn(msg,exit_val=exit_val,level=4)."""
165
165
166 warn(msg,exit_val=exit_val,level=4)
166 warn(msg,exit_val=exit_val,level=4)
167
167
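Assuming IPython.genutils is importable, the level-based helpers above are used like this; the messages are invented for the example.

from IPython.genutils import warn, info, error, fatal

info('config file not found, using defaults')      # level 1: plain message
warn('falling back to dumb terminal mode')          # level 2: 'WARNING: ...'
error('could not open history file')                # level 3: 'ERROR: ...'
# fatal('unrecoverable problem', exit_val=2)        # level 4: prints and calls sys.exit(2)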
168 #---------------------------------------------------------------------------
168 #---------------------------------------------------------------------------
169 # Debugging routines
169 # Debugging routines
170 #
170 #
171 def debugx(expr,pre_msg=''):
171 def debugx(expr,pre_msg=''):
172 """Print the value of an expression from the caller's frame.
172 """Print the value of an expression from the caller's frame.
173
173
174 Takes an expression, evaluates it in the caller's frame and prints both
174 Takes an expression, evaluates it in the caller's frame and prints both
175 the given expression and the resulting value (as well as a debug mark
175 the given expression and the resulting value (as well as a debug mark
176 indicating the name of the calling function). The input must be of a form
176 indicating the name of the calling function). The input must be of a form
177 suitable for eval().
177 suitable for eval().
178
178
179 An optional message can be passed, which will be prepended to the printed
179 An optional message can be passed, which will be prepended to the printed
180 expr->value pair."""
180 expr->value pair."""
181
181
182 cf = sys._getframe(1)
182 cf = sys._getframe(1)
183 print '[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr,
183 print '[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr,
184 eval(expr,cf.f_globals,cf.f_locals))
184 eval(expr,cf.f_globals,cf.f_locals))
185
185
186 # deactivate it by uncommenting the following line, which makes it a no-op
186 # deactivate it by uncommenting the following line, which makes it a no-op
187 #def debugx(expr,pre_msg=''): pass
187 #def debugx(expr,pre_msg=''): pass
188
188
189 #----------------------------------------------------------------------------
189 #----------------------------------------------------------------------------
190 StringTypes = types.StringTypes
190 StringTypes = types.StringTypes
191
191
192 # Basic timing functionality
192 # Basic timing functionality
193
193
194 # If possible (Unix), use the resource module instead of time.clock()
194 # If possible (Unix), use the resource module instead of time.clock()
195 try:
195 try:
196 import resource
196 import resource
197 def clocku():
197 def clocku():
198 """clocku() -> floating point number
198 """clocku() -> floating point number
199
199
200 Return the *USER* CPU time in seconds since the start of the process.
200 Return the *USER* CPU time in seconds since the start of the process.
201 This is done via a call to resource.getrusage, so it avoids the
201 This is done via a call to resource.getrusage, so it avoids the
202 wraparound problems in time.clock()."""
202 wraparound problems in time.clock()."""
203
203
204 return resource.getrusage(resource.RUSAGE_SELF)[0]
204 return resource.getrusage(resource.RUSAGE_SELF)[0]
205
205
206 def clocks():
206 def clocks():
207 """clocks() -> floating point number
207 """clocks() -> floating point number
208
208
209 Return the *SYSTEM* CPU time in seconds since the start of the process.
209 Return the *SYSTEM* CPU time in seconds since the start of the process.
210 This is done via a call to resource.getrusage, so it avoids the
210 This is done via a call to resource.getrusage, so it avoids the
211 wraparound problems in time.clock()."""
211 wraparound problems in time.clock()."""
212
212
213 return resource.getrusage(resource.RUSAGE_SELF)[1]
213 return resource.getrusage(resource.RUSAGE_SELF)[1]
214
214
215 def clock():
215 def clock():
216 """clock() -> floating point number
216 """clock() -> floating point number
217
217
218 Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of
218 Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of
219 the process. This is done via a call to resource.getrusage, so it
219 the process. This is done via a call to resource.getrusage, so it
220 avoids the wraparound problems in time.clock()."""
220 avoids the wraparound problems in time.clock()."""
221
221
222 u,s = resource.getrusage(resource.RUSAGE_SELF)[:2]
222 u,s = resource.getrusage(resource.RUSAGE_SELF)[:2]
223 return u+s
223 return u+s
224
224
225 def clock2():
225 def clock2():
226 """clock2() -> (t_user,t_system)
226 """clock2() -> (t_user,t_system)
227
227
228 Similar to clock(), but return a tuple of user/system times."""
228 Similar to clock(), but return a tuple of user/system times."""
229 return resource.getrusage(resource.RUSAGE_SELF)[:2]
229 return resource.getrusage(resource.RUSAGE_SELF)[:2]
230
230
231 except ImportError:
231 except ImportError:
232 # There is no distinction of user/system time under windows, so we just use
232 # There is no distinction of user/system time under windows, so we just use
233 # time.clock() for everything...
233 # time.clock() for everything...
234 clocku = clocks = clock = time.clock
234 clocku = clocks = clock = time.clock
235 def clock2():
235 def clock2():
236 """Under windows, system CPU time can't be measured.
236 """Under windows, system CPU time can't be measured.
237
237
238 This just returns clock() and zero."""
238 This just returns clock() and zero."""
239 return time.clock(),0.0
239 return time.clock(),0.0
240
240
241 def timings_out(reps,func,*args,**kw):
241 def timings_out(reps,func,*args,**kw):
242 """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output)
242 """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output)
243
243
244 Execute a function reps times, return a tuple with the elapsed total
244 Execute a function reps times, return a tuple with the elapsed total
245 CPU time in seconds, the time per call and the function's output.
245 CPU time in seconds, the time per call and the function's output.
246
246
247 Under Unix, the return value is the sum of user+system time consumed by
247 Under Unix, the return value is the sum of user+system time consumed by
248 the process, computed via the resource module. This prevents problems
248 the process, computed via the resource module. This prevents problems
249 related to the wraparound effect which the time.clock() function has.
249 related to the wraparound effect which the time.clock() function has.
250
250
251 Under Windows the return value is in wall clock seconds. See the
251 Under Windows the return value is in wall clock seconds. See the
252 documentation for the time module for more details."""
252 documentation for the time module for more details."""
253
253
254 reps = int(reps)
254 reps = int(reps)
255 assert reps >=1, 'reps must be >= 1'
255 assert reps >=1, 'reps must be >= 1'
256 if reps==1:
256 if reps==1:
257 start = clock()
257 start = clock()
258 out = func(*args,**kw)
258 out = func(*args,**kw)
259 tot_time = clock()-start
259 tot_time = clock()-start
260 else:
260 else:
261 rng = xrange(reps-1) # the last time is executed separately to store output
261 rng = xrange(reps-1) # the last time is executed separately to store output
262 start = clock()
262 start = clock()
263 for dummy in rng: func(*args,**kw)
263 for dummy in rng: func(*args,**kw)
264 out = func(*args,**kw) # one last time
264 out = func(*args,**kw) # one last time
265 tot_time = clock()-start
265 tot_time = clock()-start
266 av_time = tot_time / reps
266 av_time = tot_time / reps
267 return tot_time,av_time,out
267 return tot_time,av_time,out
268
268
269 def timings(reps,func,*args,**kw):
269 def timings(reps,func,*args,**kw):
270 """timings(reps,func,*args,**kw) -> (t_total,t_per_call)
270 """timings(reps,func,*args,**kw) -> (t_total,t_per_call)
271
271
272 Execute a function reps times, return a tuple with the elapsed total CPU
272 Execute a function reps times, return a tuple with the elapsed total CPU
273 time in seconds and the time per call. These are just the first two values
273 time in seconds and the time per call. These are just the first two values
274 in timings_out()."""
274 in timings_out()."""
275
275
276 return timings_out(reps,func,*args,**kw)[0:2]
276 return timings_out(reps,func,*args,**kw)[0:2]
277
277
278 def timing(func,*args,**kw):
278 def timing(func,*args,**kw):
279 """timing(func,*args,**kw) -> t_total
279 """timing(func,*args,**kw) -> t_total
280
280
281 Execute a function once, return the elapsed total CPU time in
281 Execute a function once, return the elapsed total CPU time in
282 seconds. This is just the first value in timings_out()."""
282 seconds. This is just the first value in timings_out()."""
283
283
284 return timings_out(1,func,*args,**kw)[0]
284 return timings_out(1,func,*args,**kw)[0]
285
285
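Assuming IPython.genutils is importable, the timing helpers above can be used as follows; the toy function is just for illustration.

from IPython.genutils import timing, timings, timings_out

def busy(n=100000):
    return sum(xrange(n))

t_total = timing(busy)                      # one call, total CPU seconds
t_total, t_per_call = timings(20, busy)     # 20 calls, total and per-call time
t_total, t_per_call, result = timings_out(20, busy, 200000)
print t_per_call, result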
286 #****************************************************************************
286 #****************************************************************************
287 # file and system
287 # file and system
288
288
289 def arg_split(s,posix=False):
289 def arg_split(s,posix=False):
290 """Split a command line's arguments in a shell-like manner.
290 """Split a command line's arguments in a shell-like manner.
291
291
292 This is a modified version of the standard library's shlex.split()
292 This is a modified version of the standard library's shlex.split()
293 function, but with a default of posix=False for splitting, so that quotes
293 function, but with a default of posix=False for splitting, so that quotes
294 in inputs are respected."""
294 in inputs are respected."""
295
295
296 # XXX - there may be unicode-related problems here!!! I'm not sure that
296 # XXX - there may be unicode-related problems here!!! I'm not sure that
297 # shlex is truly unicode-safe, so it might be necessary to do
297 # shlex is truly unicode-safe, so it might be necessary to do
298 #
298 #
299 # s = s.encode(sys.stdin.encoding)
299 # s = s.encode(sys.stdin.encoding)
300 #
300 #
301 # first, to ensure that shlex gets a normal string. Input from anyone who
301 # first, to ensure that shlex gets a normal string. Input from anyone who
302 # knows more about unicode and shlex than I would be good to have here...
302 # knows more about unicode and shlex than I would be good to have here...
303 lex = shlex.shlex(s, posix=posix)
303 lex = shlex.shlex(s, posix=posix)
304 lex.whitespace_split = True
304 lex.whitespace_split = True
305 return list(lex)
305 return list(lex)
306
306
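The difference the posix=False default makes is easiest to see on a quoted argument; assuming IPython.genutils is importable:

from IPython.genutils import arg_split

print arg_split('ls -l "my file.txt"')
# -> ['ls', '-l', '"my file.txt"']   (quotes kept with the posix=False default)

print arg_split('ls -l "my file.txt"', posix=True)
# -> ['ls', '-l', 'my file.txt']     (standard shlex behaviour)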
307 def system(cmd,verbose=0,debug=0,header=''):
307 def system(cmd,verbose=0,debug=0,header=''):
308 """Execute a system command, return its exit status.
308 """Execute a system command, return its exit status.
309
309
310 Options:
310 Options:
311
311
312 - verbose (0): print the command to be executed.
312 - verbose (0): print the command to be executed.
313
313
314 - debug (0): only print, do not actually execute.
314 - debug (0): only print, do not actually execute.
315
315
316 - header (''): Header to print on screen prior to the executed command (it
316 - header (''): Header to print on screen prior to the executed command (it
317 is only prepended to the command, no newlines are added).
317 is only prepended to the command, no newlines are added).
318
318
319 Note: a stateful version of this function is available through the
319 Note: a stateful version of this function is available through the
320 SystemExec class."""
320 SystemExec class."""
321
321
322 stat = 0
322 stat = 0
323 if verbose or debug: print header+cmd
323 if verbose or debug: print header+cmd
324 sys.stdout.flush()
324 sys.stdout.flush()
325 if not debug: stat = os.system(cmd)
325 if not debug: stat = os.system(cmd)
326 return stat
326 return stat
327
327
328 def abbrev_cwd():
328 def abbrev_cwd():
329 """ Return abbreviated version of cwd, e.g. d:mydir """
329 """ Return abbreviated version of cwd, e.g. d:mydir """
330 cwd = os.getcwd().replace('\\','/')
330 cwd = os.getcwd().replace('\\','/')
331 drivepart = ''
331 drivepart = ''
332 tail = cwd
332 tail = cwd
333 if sys.platform == 'win32':
333 if sys.platform == 'win32':
334 if len(cwd) < 4:
334 if len(cwd) < 4:
335 return cwd
335 return cwd
336 drivepart,tail = os.path.splitdrive(cwd)
336 drivepart,tail = os.path.splitdrive(cwd)
337
337
338
338
339 parts = tail.split('/')
339 parts = tail.split('/')
340 if len(parts) > 2:
340 if len(parts) > 2:
341 tail = '/'.join(parts[-2:])
341 tail = '/'.join(parts[-2:])
342
342
343 return (drivepart + (
343 return (drivepart + (
344 cwd == '/' and '/' or tail))
344 cwd == '/' and '/' or tail))
345
345
346
346
347 # This function is used by ipython in a lot of places to make system calls.
347 # This function is used by ipython in a lot of places to make system calls.
348 # We need it to be slightly different under win32, due to the vagaries of
348 # We need it to be slightly different under win32, due to the vagaries of
349 # 'network shares'. A win32 override is below.
349 # 'network shares'. A win32 override is below.
350
350
351 def shell(cmd,verbose=0,debug=0,header=''):
351 def shell(cmd,verbose=0,debug=0,header=''):
352 """Execute a command in the system shell, always return None.
352 """Execute a command in the system shell, always return None.
353
353
354 Options:
354 Options:
355
355
356 - verbose (0): print the command to be executed.
356 - verbose (0): print the command to be executed.
357
357
358 - debug (0): only print, do not actually execute.
358 - debug (0): only print, do not actually execute.
359
359
360 - header (''): Header to print on screen prior to the executed command (it
360 - header (''): Header to print on screen prior to the executed command (it
361 is only prepended to the command, no newlines are added).
361 is only prepended to the command, no newlines are added).
362
362
363 Note: this is similar to genutils.system(), but it returns None so it can
363 Note: this is similar to genutils.system(), but it returns None so it can
364 be conveniently used in interactive loops without getting the return value
364 be conveniently used in interactive loops without getting the return value
365 (typically 0) printed many times."""
365 (typically 0) printed many times."""
366
366
367 stat = 0
367 stat = 0
368 if verbose or debug: print header+cmd
368 if verbose or debug: print header+cmd
369 # flush stdout so we don't mangle python's buffering
369 # flush stdout so we don't mangle python's buffering
370 sys.stdout.flush()
370 sys.stdout.flush()
371
371
372 if not debug:
372 if not debug:
373 platutils.set_term_title("IPy " + cmd)
373 platutils.set_term_title("IPy " + cmd)
374 os.system(cmd)
374 os.system(cmd)
375 platutils.set_term_title("IPy " + abbrev_cwd())
375 platutils.set_term_title("IPy " + abbrev_cwd())
376
376
377 # override shell() for win32 to deal with network shares
377 # override shell() for win32 to deal with network shares
378 if os.name in ('nt','dos'):
378 if os.name in ('nt','dos'):
379
379
380 shell_ori = shell
380 shell_ori = shell
381
381
382 def shell(cmd,verbose=0,debug=0,header=''):
382 def shell(cmd,verbose=0,debug=0,header=''):
383 if os.getcwd().startswith(r"\\"):
383 if os.getcwd().startswith(r"\\"):
384 path = os.getcwd()
384 path = os.getcwd()
385 # change to c drive (cannot be on UNC-share when issuing os.system,
385 # change to c drive (cannot be on UNC-share when issuing os.system,
386 # as cmd.exe cannot handle UNC addresses)
386 # as cmd.exe cannot handle UNC addresses)
387 os.chdir("c:")
387 os.chdir("c:")
388 # issue pushd to the UNC-share and then run the command
388 # issue pushd to the UNC-share and then run the command
389 try:
389 try:
390 shell_ori('"pushd %s&&"'%path+cmd,verbose,debug,header)
390 shell_ori('"pushd %s&&"'%path+cmd,verbose,debug,header)
391 finally:
391 finally:
392 os.chdir(path)
392 os.chdir(path)
393 else:
393 else:
394 shell_ori(cmd,verbose,debug,header)
394 shell_ori(cmd,verbose,debug,header)
395
395
396 shell.__doc__ = shell_ori.__doc__
396 shell.__doc__ = shell_ori.__doc__
397
397
398 def getoutput(cmd,verbose=0,debug=0,header='',split=0):
398 def getoutput(cmd,verbose=0,debug=0,header='',split=0):
399 """Dummy substitute for perl's backquotes.
399 """Dummy substitute for perl's backquotes.
400
400
401 Executes a command and returns the output.
401 Executes a command and returns the output.
402
402
403 Accepts the same arguments as system(), plus:
403 Accepts the same arguments as system(), plus:
404
404
405 - split(0): if true, the output is returned as a list split on newlines.
405 - split(0): if true, the output is returned as a list split on newlines.
406
406
407 Note: a stateful version of this function is available through the
407 Note: a stateful version of this function is available through the
408 SystemExec class.
408 SystemExec class.
409
409
410 This is pretty much deprecated and rarely used;
410 This is pretty much deprecated and rarely used;
411 genutils.getoutputerror may be what you need.
411 genutils.getoutputerror may be what you need.
412
412
413 """
413 """
414
414
415 if verbose or debug: print header+cmd
415 if verbose or debug: print header+cmd
416 if not debug:
416 if not debug:
417 output = os.popen(cmd).read()
417 output = os.popen(cmd).read()
418 # stripping the last \n is here for backwards compat.
418 # stripping the last \n is here for backwards compat.
419 if output.endswith('\n'):
419 if output.endswith('\n'):
420 output = output[:-1]
420 output = output[:-1]
421 if split:
421 if split:
422 return output.split('\n')
422 return output.split('\n')
423 else:
423 else:
424 return output
424 return output
425
425
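# --- Editor's illustrative sketch, not part of the original module: how
# getoutput() might be used. The command strings and the helper name
# _demo_getoutput are assumptions chosen for illustration only.
def _demo_getoutput():
    # capture a command's stdout as a single string (Perl-backquote style)
    listing = getoutput('ls -l', verbose=1, header='Running: ')
    # or as a list of lines, by passing split=1
    lines = getoutput('ls -l', split=1)
    print "captured %d characters in %d lines" % (len(listing), len(lines))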
426 def getoutputerror(cmd,verbose=0,debug=0,header='',split=0):
426 def getoutputerror(cmd,verbose=0,debug=0,header='',split=0):
427 """Return (standard output,standard error) of executing cmd in a shell.
427 """Return (standard output,standard error) of executing cmd in a shell.
428
428
429 Accepts the same arguments as system(), plus:
429 Accepts the same arguments as system(), plus:
430
430
431 - split(0): if true, each of stdout/err is returned as a list split on
431 - split(0): if true, each of stdout/err is returned as a list split on
432 newlines.
432 newlines.
433
433
434 Note: a stateful version of this function is available through the
434 Note: a stateful version of this function is available through the
435 SystemExec class."""
435 SystemExec class."""
436
436
437 if verbose or debug: print header+cmd
437 if verbose or debug: print header+cmd
438 if not cmd:
438 if not cmd:
439 if split:
439 if split:
440 return [],[]
440 return [],[]
441 else:
441 else:
442 return '',''
442 return '',''
443 if not debug:
443 if not debug:
444 pin,pout,perr = os.popen3(cmd)
444 pin,pout,perr = os.popen3(cmd)
445 tout = pout.read().rstrip()
445 tout = pout.read().rstrip()
446 terr = perr.read().rstrip()
446 terr = perr.read().rstrip()
447 pin.close()
447 pin.close()
448 pout.close()
448 pout.close()
449 perr.close()
449 perr.close()
450 if split:
450 if split:
451 return tout.split('\n'),terr.split('\n')
451 return tout.split('\n'),terr.split('\n')
452 else:
452 else:
453 return tout,terr
453 return tout,terr
454
454
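# --- Editor's illustrative sketch, not part of the original module:
# getoutputerror() captures stdout and stderr separately. The command and
# helper name are made up for illustration.
def _demo_getoutputerror():
    out, err = getoutputerror('ls . no_such_file', header='Running: ')
    print "stdout:", out
    print "stderr:", err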
455 # for compatibility with older naming conventions
455 # for compatibility with older naming conventions
456 xsys = system
456 xsys = system
457 bq = getoutput
457 bq = getoutput
458
458
459 class SystemExec:
459 class SystemExec:
460 """Access the system and getoutput functions through a stateful interface.
460 """Access the system and getoutput functions through a stateful interface.
461
461
462 Note: here we refer to the system and getoutput functions from this
462 Note: here we refer to the system and getoutput functions from this
463 library, not the ones from the standard python library.
463 library, not the ones from the standard python library.
464
464
465 This class offers the system and getoutput functions as methods, but the
465 This class offers the system and getoutput functions as methods, but the
466 verbose, debug and header parameters can be set for the instance (at
466 verbose, debug and header parameters can be set for the instance (at
467 creation time or later) so that they don't need to be specified on each
467 creation time or later) so that they don't need to be specified on each
468 call.
468 call.
469
469
470 For efficiency reasons, there's no way to override the parameters on a
470 For efficiency reasons, there's no way to override the parameters on a
471 per-call basis other than by setting instance attributes. If you need
471 per-call basis other than by setting instance attributes. If you need
472 local overrides, it's best to directly call system() or getoutput().
472 local overrides, it's best to directly call system() or getoutput().
473
473
474 The following names are provided as alternate options:
474 The following names are provided as alternate options:
475 - xsys: alias to system
475 - xsys: alias to system
476 - bq: alias to getoutput
476 - bq: alias to getoutput
477
477
478 An instance can then be created as:
478 An instance can then be created as:
479 >>> sysexec = SystemExec(verbose=1,debug=0,header='Calling: ')
479 >>> sysexec = SystemExec(verbose=1,debug=0,header='Calling: ')
480
480
481 And used as:
481 And used as:
482 >>> sysexec.xsys('pwd')
482 >>> sysexec.xsys('pwd')
483 >>> dirlist = sysexec.bq('ls -l')
483 >>> dirlist = sysexec.bq('ls -l')
484 """
484 """
485
485
486 def __init__(self,verbose=0,debug=0,header='',split=0):
486 def __init__(self,verbose=0,debug=0,header='',split=0):
487 """Specify the instance's values for verbose, debug and header."""
487 """Specify the instance's values for verbose, debug and header."""
488 setattr_list(self,'verbose debug header split')
488 setattr_list(self,'verbose debug header split')
489
489
490 def system(self,cmd):
490 def system(self,cmd):
491 """Stateful interface to system(), with the same keyword parameters."""
491 """Stateful interface to system(), with the same keyword parameters."""
492
492
493 system(cmd,self.verbose,self.debug,self.header)
493 system(cmd,self.verbose,self.debug,self.header)
494
494
495 def shell(self,cmd):
495 def shell(self,cmd):
496 """Stateful interface to shell(), with the same keyword parameters."""
496 """Stateful interface to shell(), with the same keyword parameters."""
497
497
498 shell(cmd,self.verbose,self.debug,self.header)
498 shell(cmd,self.verbose,self.debug,self.header)
499
499
500 xsys = system # alias
500 xsys = system # alias
501
501
502 def getoutput(self,cmd):
502 def getoutput(self,cmd):
503 """Stateful interface to getoutput()."""
503 """Stateful interface to getoutput()."""
504
504
505 return getoutput(cmd,self.verbose,self.debug,self.header,self.split)
505 return getoutput(cmd,self.verbose,self.debug,self.header,self.split)
506
506
507 def getoutputerror(self,cmd):
507 def getoutputerror(self,cmd):
508 """Stateful interface to getoutputerror()."""
508 """Stateful interface to getoutputerror()."""
509
509
510 return getoutputerror(cmd,self.verbose,self.debug,self.header,self.split)
510 return getoutputerror(cmd,self.verbose,self.debug,self.header,self.split)
511
511
512 bq = getoutput # alias
512 bq = getoutput # alias
513
513
514 #-----------------------------------------------------------------------------
514 #-----------------------------------------------------------------------------
515 def mutex_opts(dict,ex_op):
515 def mutex_opts(dict,ex_op):
516 """Check for presence of mutually exclusive keys in a dict.
516 """Check for presence of mutually exclusive keys in a dict.
517
517
518 Call: mutex_opts(dict,[[op1a,op1b],[op2a,op2b],...])"""
519 for op1,op2 in ex_op:
519 for op1,op2 in ex_op:
520 if op1 in dict and op2 in dict:
520 if op1 in dict and op2 in dict:
521 raise ValueError,'\n*** ERROR in Arguments *** '\
521 raise ValueError,'\n*** ERROR in Arguments *** '\
522 'Options '+op1+' and '+op2+' are mutually exclusive.'
522 'Options '+op1+' and '+op2+' are mutually exclusive.'
523
523
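# --- Editor's illustrative sketch, not part of the original module:
# mutex_opts() raises ValueError when both options of a pair are present.
# The option names here are hypothetical.
def _demo_mutex_opts():
    opts = {'quiet': 1, 'verbose': 1}
    try:
        mutex_opts(opts, [['quiet', 'verbose'], ['color', 'nocolor']])
    except ValueError, e:
        print e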
524 #-----------------------------------------------------------------------------
524 #-----------------------------------------------------------------------------
525 def get_py_filename(name):
525 def get_py_filename(name):
526 """Return a valid python filename in the current directory.
526 """Return a valid python filename in the current directory.
527
527
528 If the given name is not a file, it adds '.py' and searches again.
528 If the given name is not a file, it adds '.py' and searches again.
529 Raises IOError with an informative message if the file isn't found."""
529 Raises IOError with an informative message if the file isn't found."""
530
530
531 name = os.path.expanduser(name)
531 name = os.path.expanduser(name)
532 if not os.path.isfile(name) and not name.endswith('.py'):
532 if not os.path.isfile(name) and not name.endswith('.py'):
533 name += '.py'
533 name += '.py'
534 if os.path.isfile(name):
534 if os.path.isfile(name):
535 return name
535 return name
536 else:
536 else:
537 raise IOError,'File `%s` not found.' % name
537 raise IOError,'File `%s` not found.' % name
538
538
539 #-----------------------------------------------------------------------------
539 #-----------------------------------------------------------------------------
540 def filefind(fname,alt_dirs = None):
540 def filefind(fname,alt_dirs = None):
541 """Return the given filename either in the current directory, if it
541 """Return the given filename either in the current directory, if it
542 exists, or in a specified list of directories.
542 exists, or in a specified list of directories.
543
543
544 ~ expansion is done on all file and directory names.
544 ~ expansion is done on all file and directory names.
545
545
546 Upon an unsuccessful search, raise an IOError exception."""
546 Upon an unsuccessful search, raise an IOError exception."""
547
547
548 if alt_dirs is None:
548 if alt_dirs is None:
549 try:
549 try:
550 alt_dirs = get_home_dir()
550 alt_dirs = get_home_dir()
551 except HomeDirError:
551 except HomeDirError:
552 alt_dirs = os.getcwd()
552 alt_dirs = os.getcwd()
553 search = [fname] + list_strings(alt_dirs)
553 search = [fname] + list_strings(alt_dirs)
554 search = map(os.path.expanduser,search)
554 search = map(os.path.expanduser,search)
555 #print 'search list for',fname,'list:',search # dbg
555 #print 'search list for',fname,'list:',search # dbg
556 fname = search[0]
556 fname = search[0]
557 if os.path.isfile(fname):
557 if os.path.isfile(fname):
558 return fname
558 return fname
559 for direc in search[1:]:
559 for direc in search[1:]:
560 testname = os.path.join(direc,fname)
560 testname = os.path.join(direc,fname)
561 #print 'testname',testname # dbg
561 #print 'testname',testname # dbg
562 if os.path.isfile(testname):
562 if os.path.isfile(testname):
563 return testname
563 return testname
564 raise IOError,'File ' + `fname` + \
565 ' not found in current or supplied directories: ' + `alt_dirs`
566
566
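# --- Editor's illustrative sketch, not part of the original module: filefind()
# returns the first hit for fname in the current directory or in alt_dirs.
# The file and directory names are hypothetical.
def _demo_filefind():
    try:
        rcfile = filefind('ipythonrc', ['~/.ipython', '~'])
        print "found:", rcfile
    except IOError, e:
        print e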
567 #----------------------------------------------------------------------------
567 #----------------------------------------------------------------------------
568 def file_read(filename):
568 def file_read(filename):
569 """Read a file and close it. Returns the file source."""
569 """Read a file and close it. Returns the file source."""
570 fobj = open(filename,'r');
570 fobj = open(filename,'r');
571 source = fobj.read();
571 source = fobj.read();
572 fobj.close()
572 fobj.close()
573 return source
573 return source
574
574
575 def file_readlines(filename):
575 def file_readlines(filename):
576 """Read a file and close it. Returns the file source using readlines()."""
576 """Read a file and close it. Returns the file source using readlines()."""
577 fobj = open(filename,'r');
577 fobj = open(filename,'r');
578 lines = fobj.readlines();
578 lines = fobj.readlines();
579 fobj.close()
579 fobj.close()
580 return lines
580 return lines
581
581
582 #----------------------------------------------------------------------------
582 #----------------------------------------------------------------------------
583 def target_outdated(target,deps):
583 def target_outdated(target,deps):
584 """Determine whether a target is out of date.
584 """Determine whether a target is out of date.
585
585
586 target_outdated(target,deps) -> 1/0
586 target_outdated(target,deps) -> 1/0
587
587
588 deps: list of filenames which MUST exist.
588 deps: list of filenames which MUST exist.
589 target: single filename which may or may not exist.
589 target: single filename which may or may not exist.
590
590
591 If target doesn't exist or is older than any file listed in deps, return
591 If target doesn't exist or is older than any file listed in deps, return
592 true, otherwise return false.
592 true, otherwise return false.
593 """
593 """
594 try:
594 try:
595 target_time = os.path.getmtime(target)
595 target_time = os.path.getmtime(target)
596 except os.error:
596 except os.error:
597 return 1
597 return 1
598 for dep in deps:
598 for dep in deps:
599 dep_time = os.path.getmtime(dep)
599 dep_time = os.path.getmtime(dep)
600 if dep_time > target_time:
600 if dep_time > target_time:
601 #print "For target",target,"Dep failed:",dep # dbg
601 #print "For target",target,"Dep failed:",dep # dbg
602 #print "times (dep,tar):",dep_time,target_time # dbg
602 #print "times (dep,tar):",dep_time,target_time # dbg
603 return 1
603 return 1
604 return 0
604 return 0
605
605
606 #-----------------------------------------------------------------------------
606 #-----------------------------------------------------------------------------
607 def target_update(target,deps,cmd):
607 def target_update(target,deps,cmd):
608 """Update a target with a given command given a list of dependencies.
608 """Update a target with a given command given a list of dependencies.
609
609
610 target_update(target,deps,cmd) -> runs cmd if target is outdated.
610 target_update(target,deps,cmd) -> runs cmd if target is outdated.
611
611
612 This is just a wrapper around target_outdated() which calls the given
612 This is just a wrapper around target_outdated() which calls the given
613 command if target is outdated."""
613 command if target is outdated."""
614
614
615 if target_outdated(target,deps):
615 if target_outdated(target,deps):
616 xsys(cmd)
616 xsys(cmd)
617
617
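# --- Editor's illustrative sketch, not part of the original module: a
# make-style rebuild with target_update(). The file names and the command are
# hypothetical; remember that every entry in deps must exist.
def _demo_target_update():
    # regenerate doc.html only if it is missing or older than doc.txt
    target_update('doc.html', ['doc.txt'], 'rst2html doc.txt doc.html')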
618 #----------------------------------------------------------------------------
618 #----------------------------------------------------------------------------
619 def unquote_ends(istr):
619 def unquote_ends(istr):
620 """Remove a single pair of quotes from the endpoints of a string."""
620 """Remove a single pair of quotes from the endpoints of a string."""
621
621
622 if not istr:
622 if not istr:
623 return istr
623 return istr
624 if (istr[0]=="'" and istr[-1]=="'") or \
624 if (istr[0]=="'" and istr[-1]=="'") or \
625 (istr[0]=='"' and istr[-1]=='"'):
625 (istr[0]=='"' and istr[-1]=='"'):
626 return istr[1:-1]
626 return istr[1:-1]
627 else:
627 else:
628 return istr
628 return istr
629
629
630 #----------------------------------------------------------------------------
630 #----------------------------------------------------------------------------
631 def process_cmdline(argv,names=[],defaults={},usage=''):
631 def process_cmdline(argv,names=[],defaults={},usage=''):
632 """ Process command-line options and arguments.
632 """ Process command-line options and arguments.
633
633
634 Arguments:
634 Arguments:
635
635
636 - argv: list of arguments, typically sys.argv.
636 - argv: list of arguments, typically sys.argv.
637
637
638 - names: list of option names. See DPyGetOpt docs for details on options
638 - names: list of option names. See DPyGetOpt docs for details on options
639 syntax.
639 syntax.
640
640
641 - defaults: dict of default values.
641 - defaults: dict of default values.
642
642
643 - usage: optional usage notice to print if a wrong argument is passed.
643 - usage: optional usage notice to print if a wrong argument is passed.
644
644
645 Return a dict of options and a list of free arguments."""
645 Return a dict of options and a list of free arguments."""
646
646
647 getopt = DPyGetOpt.DPyGetOpt()
647 getopt = DPyGetOpt.DPyGetOpt()
648 getopt.setIgnoreCase(0)
648 getopt.setIgnoreCase(0)
649 getopt.parseConfiguration(names)
649 getopt.parseConfiguration(names)
650
650
651 try:
651 try:
652 getopt.processArguments(argv)
652 getopt.processArguments(argv)
653 except DPyGetOpt.ArgumentError, exc:
653 except DPyGetOpt.ArgumentError, exc:
654 print usage
654 print usage
655 warn('"%s"' % exc,level=4)
655 warn('"%s"' % exc,level=4)
656
656
657 defaults.update(getopt.optionValues)
657 defaults.update(getopt.optionValues)
658 args = getopt.freeValues
658 args = getopt.freeValues
659
659
660 return defaults,args
660 return defaults,args
661
661
662 #----------------------------------------------------------------------------
662 #----------------------------------------------------------------------------
663 def optstr2types(ostr):
663 def optstr2types(ostr):
664 """Convert a string of option names to a dict of type mappings.
664 """Convert a string of option names to a dict of type mappings.
665
665
666 optstr2types(str) -> {None:'string_opts',int:'int_opts',float:'float_opts'}
666 optstr2types(str) -> {None:'string_opts',int:'int_opts',float:'float_opts'}
667
667
668 This is used to get the types of all the options in a string formatted
668 This is used to get the types of all the options in a string formatted
669 with the conventions of DPyGetOpt. The 'type' None is used for options
669 with the conventions of DPyGetOpt. The 'type' None is used for options
670 which are strings (they need no further conversion). This function's main
670 which are strings (they need no further conversion). This function's main
671 use is to get a typemap for use with read_dict().
671 use is to get a typemap for use with read_dict().
672 """
672 """
673
673
674 typeconv = {None:'',int:'',float:''}
674 typeconv = {None:'',int:'',float:''}
675 typemap = {'s':None,'i':int,'f':float}
675 typemap = {'s':None,'i':int,'f':float}
676 opt_re = re.compile(r'([\w]*)([^:=]*:?=?)([sif]?)')
676 opt_re = re.compile(r'([\w]*)([^:=]*:?=?)([sif]?)')
677
677
678 for w in ostr.split():
678 for w in ostr.split():
679 oname,alias,otype = opt_re.match(w).groups()
679 oname,alias,otype = opt_re.match(w).groups()
680 if otype == '' or alias == '!': # simple switches are integers too
680 if otype == '' or alias == '!': # simple switches are integers too
681 otype = 'i'
681 otype = 'i'
682 typeconv[typemap[otype]] += oname + ' '
682 typeconv[typemap[otype]] += oname + ' '
683 return typeconv
683 return typeconv
684
684
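# --- Editor's illustrative sketch, not part of the original module: building
# a read_dict() typemap with optstr2types(). The option string is hypothetical
# but follows the DPyGetOpt conventions described above.
def _demo_optstr2types():
    tmap = optstr2types('quiet! width=i ratio=f name=s')
    # roughly: {int: 'quiet width ', float: 'ratio ', None: 'name '}
    print tmap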
685 #----------------------------------------------------------------------------
685 #----------------------------------------------------------------------------
686 def read_dict(filename,type_conv=None,**opt):
686 def read_dict(filename,type_conv=None,**opt):
687
687
688 """Read a dictionary of key=value pairs from an input file, optionally
688 """Read a dictionary of key=value pairs from an input file, optionally
689 performing conversions on the resulting values.
689 performing conversions on the resulting values.
690
690
691 read_dict(filename,type_conv,**opt) -> dict
691 read_dict(filename,type_conv,**opt) -> dict
692
692
693 Only one value per line is accepted; the format should be
694 # optional comments are ignored
694 # optional comments are ignored
695 key value\n
695 key value\n
696
696
697 Args:
697 Args:
698
698
699 - type_conv: A dictionary specifying which keys need to be converted to
699 - type_conv: A dictionary specifying which keys need to be converted to
700 which types. By default all keys are read as strings. This dictionary
700 which types. By default all keys are read as strings. This dictionary
701 should have as its keys valid conversion functions for strings
701 should have as its keys valid conversion functions for strings
702 (int,long,float,complex, or your own). The value for each key
702 (int,long,float,complex, or your own). The value for each key
703 (converter) should be a whitespace separated string containing the names
703 (converter) should be a whitespace separated string containing the names
704 of all the entries in the file to be converted using that function. For
704 of all the entries in the file to be converted using that function. For
705 keys to be left alone, use None as the conversion function (only needed
705 keys to be left alone, use None as the conversion function (only needed
706 with purge=1, see below).
706 with purge=1, see below).
707
707
708 - opt: dictionary with extra options as below (default in parens)
708 - opt: dictionary with extra options as below (default in parens)
709
709
710 purge(0): if set to 1, all keys *not* listed in type_conv are purged out
710 purge(0): if set to 1, all keys *not* listed in type_conv are purged out
711 of the dictionary to be returned. If purge is going to be used, the
711 of the dictionary to be returned. If purge is going to be used, the
712 set of keys to be left as strings also has to be explicitly specified
712 set of keys to be left as strings also has to be explicitly specified
713 using the (non-existent) conversion function None.
713 using the (non-existent) conversion function None.
714
714
715 fs(None): field separator. This is the key/value separator to be used
715 fs(None): field separator. This is the key/value separator to be used
716 when parsing the file. The None default means any whitespace [behavior
716 when parsing the file. The None default means any whitespace [behavior
717 of string.split()].
717 of string.split()].
718
718
719 strip(0): if 1, strip string values of leading/trailing whitespace.
720
720
721 warn(1): warning level if requested keys are not found in file.
721 warn(1): warning level if requested keys are not found in file.
722 - 0: silently ignore.
722 - 0: silently ignore.
723 - 1: inform but proceed.
723 - 1: inform but proceed.
724 - 2: raise KeyError exception.
724 - 2: raise KeyError exception.
725
725
726 no_empty(0): if 1, remove keys with whitespace strings as a value.
726 no_empty(0): if 1, remove keys with whitespace strings as a value.
727
727
728 unique([]): list of keys (or space separated string) which can't be
728 unique([]): list of keys (or space separated string) which can't be
729 repeated. If one such key is found in the file, each new instance
729 repeated. If one such key is found in the file, each new instance
730 overwrites the previous one. For keys not listed here, the behavior is
730 overwrites the previous one. For keys not listed here, the behavior is
731 to make a list of all appearances.
731 to make a list of all appearances.
732
732
733 Example:
733 Example:
734 If the input file test.ini has:
734 If the input file test.ini has:
735 i 3
735 i 3
736 x 4.5
736 x 4.5
737 y 5.5
737 y 5.5
738 s hi ho
738 s hi ho
739 Then:
739 Then:
740
740
741 >>> type_conv={int:'i',float:'x',None:'s'}
741 >>> type_conv={int:'i',float:'x',None:'s'}
742 >>> read_dict('test.ini')
742 >>> read_dict('test.ini')
743 {'i': '3', 's': 'hi ho', 'x': '4.5', 'y': '5.5'}
743 {'i': '3', 's': 'hi ho', 'x': '4.5', 'y': '5.5'}
744 >>> read_dict('test.ini',type_conv)
744 >>> read_dict('test.ini',type_conv)
745 {'i': 3, 's': 'hi ho', 'x': 4.5, 'y': '5.5'}
745 {'i': 3, 's': 'hi ho', 'x': 4.5, 'y': '5.5'}
746 >>> read_dict('test.ini',type_conv,purge=1)
746 >>> read_dict('test.ini',type_conv,purge=1)
747 {'i': 3, 's': 'hi ho', 'x': 4.5}
747 {'i': 3, 's': 'hi ho', 'x': 4.5}
748 """
748 """
749
749
750 # starting config
750 # starting config
751 opt.setdefault('purge',0)
751 opt.setdefault('purge',0)
752 opt.setdefault('fs',None) # field sep defaults to any whitespace
752 opt.setdefault('fs',None) # field sep defaults to any whitespace
753 opt.setdefault('strip',0)
753 opt.setdefault('strip',0)
754 opt.setdefault('warn',1)
754 opt.setdefault('warn',1)
755 opt.setdefault('no_empty',0)
755 opt.setdefault('no_empty',0)
756 opt.setdefault('unique','')
756 opt.setdefault('unique','')
757 if type(opt['unique']) in StringTypes:
757 if type(opt['unique']) in StringTypes:
758 unique_keys = qw(opt['unique'])
758 unique_keys = qw(opt['unique'])
759 elif type(opt['unique']) in (types.TupleType,types.ListType):
759 elif type(opt['unique']) in (types.TupleType,types.ListType):
760 unique_keys = opt['unique']
760 unique_keys = opt['unique']
761 else:
761 else:
762 raise ValueError, 'Unique keys must be given as a string, List or Tuple'
762 raise ValueError, 'Unique keys must be given as a string, List or Tuple'
763
763
764 dict = {}
764 dict = {}
765 # first read in table of values as strings
765 # first read in table of values as strings
766 file = open(filename,'r')
766 file = open(filename,'r')
767 for line in file.readlines():
767 for line in file.readlines():
768 line = line.strip()
768 line = line.strip()
769 if len(line) and line[0]=='#': continue
769 if len(line) and line[0]=='#': continue
770 if len(line)>0:
770 if len(line)>0:
771 lsplit = line.split(opt['fs'],1)
771 lsplit = line.split(opt['fs'],1)
772 try:
772 try:
773 key,val = lsplit
773 key,val = lsplit
774 except ValueError:
774 except ValueError:
775 key,val = lsplit[0],''
775 key,val = lsplit[0],''
776 key = key.strip()
776 key = key.strip()
777 if opt['strip']: val = val.strip()
777 if opt['strip']: val = val.strip()
778 if val == "''" or val == '""': val = ''
778 if val == "''" or val == '""': val = ''
779 if opt['no_empty'] and (val=='' or val.isspace()):
779 if opt['no_empty'] and (val=='' or val.isspace()):
780 continue
780 continue
781 # if a key is found more than once in the file, build a list
781 # if a key is found more than once in the file, build a list
782 # unless it's in the 'unique' list. In that case, last found in file
782 # unless it's in the 'unique' list. In that case, last found in file
783 # takes precedence. User beware.
783 # takes precedence. User beware.
784 try:
784 try:
785 if dict[key] and key in unique_keys:
785 if dict[key] and key in unique_keys:
786 dict[key] = val
786 dict[key] = val
787 elif type(dict[key]) is types.ListType:
787 elif type(dict[key]) is types.ListType:
788 dict[key].append(val)
788 dict[key].append(val)
789 else:
789 else:
790 dict[key] = [dict[key],val]
790 dict[key] = [dict[key],val]
791 except KeyError:
791 except KeyError:
792 dict[key] = val
792 dict[key] = val
793 # purge if requested
793 # purge if requested
794 if opt['purge']:
794 if opt['purge']:
795 accepted_keys = qwflat(type_conv.values())
795 accepted_keys = qwflat(type_conv.values())
796 for key in dict.keys():
796 for key in dict.keys():
797 if key in accepted_keys: continue
797 if key in accepted_keys: continue
798 del(dict[key])
798 del(dict[key])
799 # now convert if requested
799 # now convert if requested
800 if type_conv==None: return dict
800 if type_conv==None: return dict
801 conversions = type_conv.keys()
801 conversions = type_conv.keys()
802 try: conversions.remove(None)
802 try: conversions.remove(None)
803 except: pass
803 except: pass
804 for convert in conversions:
804 for convert in conversions:
805 for val in qw(type_conv[convert]):
805 for val in qw(type_conv[convert]):
806 try:
806 try:
807 dict[val] = convert(dict[val])
807 dict[val] = convert(dict[val])
808 except KeyError,e:
808 except KeyError,e:
809 if opt['warn'] == 0:
809 if opt['warn'] == 0:
810 pass
810 pass
811 elif opt['warn'] == 1:
811 elif opt['warn'] == 1:
812 print >>sys.stderr, 'Warning: key',val,\
812 print >>sys.stderr, 'Warning: key',val,\
813 'not found in file',filename
813 'not found in file',filename
814 elif opt['warn'] == 2:
814 elif opt['warn'] == 2:
815 raise KeyError,e
815 raise KeyError,e
816 else:
816 else:
817 raise ValueError,'Warning level must be 0,1 or 2'
817 raise ValueError,'Warning level must be 0,1 or 2'
818
818
819 return dict
819 return dict
820
820
821 #----------------------------------------------------------------------------
821 #----------------------------------------------------------------------------
822 def flag_calls(func):
822 def flag_calls(func):
823 """Wrap a function to detect and flag when it gets called.
823 """Wrap a function to detect and flag when it gets called.
824
824
825 This is a decorator which takes a function and wraps it in a function with
825 This is a decorator which takes a function and wraps it in a function with
826 a 'called' attribute. wrapper.called is initialized to False.
826 a 'called' attribute. wrapper.called is initialized to False.
827
827
828 The wrapper.called attribute is set to False right before each call to the
828 The wrapper.called attribute is set to False right before each call to the
829 wrapped function, so if the call fails it remains False. After the call
829 wrapped function, so if the call fails it remains False. After the call
830 completes, wrapper.called is set to True and the output is returned.
830 completes, wrapper.called is set to True and the output is returned.
831
831
832 Testing for truth in wrapper.called allows you to determine if a call to
832 Testing for truth in wrapper.called allows you to determine if a call to
833 func() was attempted and succeeded."""
833 func() was attempted and succeeded."""
834
834
835 def wrapper(*args,**kw):
835 def wrapper(*args,**kw):
836 wrapper.called = False
836 wrapper.called = False
837 out = func(*args,**kw)
837 out = func(*args,**kw)
838 wrapper.called = True
838 wrapper.called = True
839 return out
839 return out
840
840
841 wrapper.called = False
841 wrapper.called = False
842 wrapper.__doc__ = func.__doc__
842 wrapper.__doc__ = func.__doc__
843 return wrapper
843 return wrapper
844
844
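# --- Editor's illustrative sketch, not part of the original module:
# flag_calls() in action. The wrapped function is made up for illustration.
def _demo_flag_calls():
    def setup():
        return 42
    setup = flag_calls(setup)
    print setup.called    # False: not called yet
    setup()
    print setup.called    # True: the call completed successfully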
845 #----------------------------------------------------------------------------
845 #----------------------------------------------------------------------------
846 def dhook_wrap(func,*a,**k):
846 def dhook_wrap(func,*a,**k):
847 """Wrap a function call in a sys.displayhook controller.
847 """Wrap a function call in a sys.displayhook controller.
848
848
849 Returns a wrapper around func which calls func, with all its arguments and
849 Returns a wrapper around func which calls func, with all its arguments and
850 keywords unmodified, using the default sys.displayhook. Since IPython
850 keywords unmodified, using the default sys.displayhook. Since IPython
851 modifies sys.displayhook, it breaks the behavior of certain systems that
851 modifies sys.displayhook, it breaks the behavior of certain systems that
852 rely on the default behavior, notably doctest.
852 rely on the default behavior, notably doctest.
853 """
853 """
854
854
855 def f(*a,**k):
855 def f(*a,**k):
856
856
857 dhook_s = sys.displayhook
857 dhook_s = sys.displayhook
858 sys.displayhook = sys.__displayhook__
858 sys.displayhook = sys.__displayhook__
859 try:
859 try:
860 out = func(*a,**k)
860 out = func(*a,**k)
861 finally:
861 finally:
862 sys.displayhook = dhook_s
862 sys.displayhook = dhook_s
863
863
864 return out
864 return out
865
865
866 f.__doc__ = func.__doc__
866 f.__doc__ = func.__doc__
867 return f
867 return f
868
868
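# --- Editor's illustrative sketch, not part of the original module:
# dhook_wrap() runs a callable with the stock sys.displayhook temporarily
# restored. The wrapped function is hypothetical.
def _demo_dhook_wrap():
    def run_doctests():
        import doctest
        doctest.testmod()
    safe_run = dhook_wrap(run_doctests)
    safe_run()    # doctest sees the default displayhook even under IPython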
869 #----------------------------------------------------------------------------
869 #----------------------------------------------------------------------------
870 def doctest_reload():
870 def doctest_reload():
871 """Properly reload doctest to reuse it interactively.
871 """Properly reload doctest to reuse it interactively.
872
872
873 This routine:
873 This routine:
874
874
875 - reloads doctest
875 - reloads doctest
876
876
877 - resets its global 'master' attribute to None, so that multiple uses of
877 - resets its global 'master' attribute to None, so that multiple uses of
878 the module interactively don't produce cumulative reports.
878 the module interactively don't produce cumulative reports.
879
879
880 - Monkeypatches its core test runner method to protect it from IPython's
880 - Monkeypatches its core test runner method to protect it from IPython's
881 modified displayhook. Doctest expects the default displayhook behavior
881 modified displayhook. Doctest expects the default displayhook behavior
882 deep down, so our modification breaks it completely. For this reason, a
882 deep down, so our modification breaks it completely. For this reason, a
883 hard monkeypatch seems like a reasonable solution rather than asking
883 hard monkeypatch seems like a reasonable solution rather than asking
884 users to manually use a different doctest runner when under IPython."""
884 users to manually use a different doctest runner when under IPython."""
885
885
886 import doctest
886 import doctest
887 reload(doctest)
887 reload(doctest)
888 doctest.master=None
888 doctest.master=None
889
889
890 try:
890 try:
891 doctest.DocTestRunner
891 doctest.DocTestRunner
892 except AttributeError:
892 except AttributeError:
893 # This is only for python 2.3 compatibility, remove once we move to
893 # This is only for python 2.3 compatibility, remove once we move to
894 # 2.4 only.
894 # 2.4 only.
895 pass
895 pass
896 else:
896 else:
897 doctest.DocTestRunner.run = dhook_wrap(doctest.DocTestRunner.run)
897 doctest.DocTestRunner.run = dhook_wrap(doctest.DocTestRunner.run)
898
898
899 #----------------------------------------------------------------------------
899 #----------------------------------------------------------------------------
900 class HomeDirError(Error):
900 class HomeDirError(Error):
901 pass
901 pass
902
902
903 def get_home_dir():
903 def get_home_dir():
904 """Return the closest possible equivalent to a 'home' directory.
904 """Return the closest possible equivalent to a 'home' directory.
905
905
906 We first try $HOME. Absent that, on NT it's $HOMEDRIVE\$HOMEPATH.
906 We first try $HOME. Absent that, on NT it's $HOMEDRIVE\$HOMEPATH.
907
907
908 Currently only Posix and NT are implemented; a HomeDirError exception is
909 raised for all other OSes. """
910
910
911 isdir = os.path.isdir
911 isdir = os.path.isdir
912 env = os.environ
912 env = os.environ
913
913
914 # first, check py2exe distribution root directory for _ipython.
914 # first, check py2exe distribution root directory for _ipython.
915 # This overrides all. Normally does not exist.
915 # This overrides all. Normally does not exist.
916
916
917 if '\\library.zip\\' in IPython.__file__.lower():
917 if '\\library.zip\\' in IPython.__file__.lower():
918 root, rest = IPython.__file__.lower().split('library.zip')
918 root, rest = IPython.__file__.lower().split('library.zip')
919 if isdir(root + '_ipython'):
919 if isdir(root + '_ipython'):
920 os.environ["IPYKITROOT"] = root.rstrip('\\')
920 os.environ["IPYKITROOT"] = root.rstrip('\\')
921 return root
921 return root
922
922
923 try:
923 try:
924 homedir = env['HOME']
924 homedir = env['HOME']
925 if not isdir(homedir):
925 if not isdir(homedir):
926 # in case a user stuck some string which does NOT resolve to a
926 # in case a user stuck some string which does NOT resolve to a
927 # valid path, it's as good as if we hadn't found it
928 raise KeyError
928 raise KeyError
929 return homedir
929 return homedir
930 except KeyError:
930 except KeyError:
931 if os.name == 'posix':
931 if os.name == 'posix':
932 raise HomeDirError,'undefined $HOME, IPython can not proceed.'
932 raise HomeDirError,'undefined $HOME, IPython can not proceed.'
933 elif os.name == 'nt':
933 elif os.name == 'nt':
934 # For some strange reason, win9x returns 'nt' for os.name.
934 # For some strange reason, win9x returns 'nt' for os.name.
935 try:
935 try:
936 homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH'])
936 homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH'])
937 if not isdir(homedir):
937 if not isdir(homedir):
938 homedir = os.path.join(env['USERPROFILE'])
938 homedir = os.path.join(env['USERPROFILE'])
939 if not isdir(homedir):
939 if not isdir(homedir):
940 raise HomeDirError
940 raise HomeDirError
941 return homedir
941 return homedir
942 except:
942 except:
943 try:
943 try:
944 # Use the registry to get the 'My Documents' folder.
944 # Use the registry to get the 'My Documents' folder.
945 import _winreg as wreg
945 import _winreg as wreg
946 key = wreg.OpenKey(wreg.HKEY_CURRENT_USER,
946 key = wreg.OpenKey(wreg.HKEY_CURRENT_USER,
947 "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
947 "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
948 homedir = wreg.QueryValueEx(key,'Personal')[0]
948 homedir = wreg.QueryValueEx(key,'Personal')[0]
949 key.Close()
949 key.Close()
950 if not isdir(homedir):
950 if not isdir(homedir):
951 e = ('Invalid "Personal" folder registry key '
951 e = ('Invalid "Personal" folder registry key '
952 'typically "My Documents".\n'
952 'typically "My Documents".\n'
953 'Value: %s\n'
953 'Value: %s\n'
954 'This is not a valid directory on your system.' %
954 'This is not a valid directory on your system.' %
955 homedir)
955 homedir)
956 raise HomeDirError(e)
956 raise HomeDirError(e)
957 return homedir
957 return homedir
958 except HomeDirError:
958 except HomeDirError:
959 raise
959 raise
960 except:
960 except:
961 return 'C:\\'
961 return 'C:\\'
962 elif os.name == 'dos':
962 elif os.name == 'dos':
963 # Desperate, may do absurd things in classic MacOS. May work under DOS.
963 # Desperate, may do absurd things in classic MacOS. May work under DOS.
964 return 'C:\\'
964 return 'C:\\'
965 else:
965 else:
966 raise HomeDirError,'support for your operating system not implemented.'
966 raise HomeDirError,'support for your operating system not implemented.'
967
967
968 #****************************************************************************
968 #****************************************************************************
969 # strings and text
969 # strings and text
970
970
971 class LSString(str):
971 class LSString(str):
972 """String derivative with a special access attributes.
972 """String derivative with a special access attributes.
973
973
974 These are normal strings, but with the special attributes:
974 These are normal strings, but with the special attributes:
975
975
976 .l (or .list) : value as list (split on newlines).
976 .l (or .list) : value as list (split on newlines).
977 .n (or .nlstr): original value (the string itself).
977 .n (or .nlstr): original value (the string itself).
978 .s (or .spstr): value as whitespace-separated string.
978 .s (or .spstr): value as whitespace-separated string.
979 .p (or .paths): list of path objects
979 .p (or .paths): list of path objects
980
980
981 Any values which require transformations are computed only once and
981 Any values which require transformations are computed only once and
982 cached.
982 cached.
983
983
984 Such strings are very useful to efficiently interact with the shell, which
984 Such strings are very useful to efficiently interact with the shell, which
985 typically only understands whitespace-separated options for commands."""
985 typically only understands whitespace-separated options for commands."""
986
986
987 def get_list(self):
987 def get_list(self):
988 try:
988 try:
989 return self.__list
989 return self.__list
990 except AttributeError:
990 except AttributeError:
991 self.__list = self.split('\n')
991 self.__list = self.split('\n')
992 return self.__list
992 return self.__list
993
993
994 l = list = property(get_list)
994 l = list = property(get_list)
995
995
996 def get_spstr(self):
996 def get_spstr(self):
997 try:
997 try:
998 return self.__spstr
998 return self.__spstr
999 except AttributeError:
999 except AttributeError:
1000 self.__spstr = self.replace('\n',' ')
1000 self.__spstr = self.replace('\n',' ')
1001 return self.__spstr
1001 return self.__spstr
1002
1002
1003 s = spstr = property(get_spstr)
1003 s = spstr = property(get_spstr)
1004
1004
1005 def get_nlstr(self):
1005 def get_nlstr(self):
1006 return self
1006 return self
1007
1007
1008 n = nlstr = property(get_nlstr)
1008 n = nlstr = property(get_nlstr)
1009
1009
1010 def get_paths(self):
1010 def get_paths(self):
1011 try:
1011 try:
1012 return self.__paths
1012 return self.__paths
1013 except AttributeError:
1013 except AttributeError:
1014 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
1014 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
1015 return self.__paths
1015 return self.__paths
1016
1016
1017 p = paths = property(get_paths)
1017 p = paths = property(get_paths)
1018
1018
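# --- Editor's illustrative sketch, not part of the original module: the cached
# convenience attributes of LSString. The sample value is made up.
def _demo_lsstring():
    out = LSString('setup.py\nREADME\ndoc')
    print out.l    # ['setup.py', 'README', 'doc']
    print out.s    # 'setup.py README doc'
    print out.p    # path objects, only for entries that exist on disk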
1019 def print_lsstring(arg):
1019 def print_lsstring(arg):
1020 """ Prettier (non-repr-like) and more informative printer for LSString """
1020 """ Prettier (non-repr-like) and more informative printer for LSString """
1021 print "LSString (.p, .n, .l, .s available). Value:"
1021 print "LSString (.p, .n, .l, .s available). Value:"
1022 print arg
1022 print arg
1023
1023
1024 print_lsstring = result_display.when_type(LSString)(print_lsstring)
1024 print_lsstring = result_display.when_type(LSString)(print_lsstring)
1025
1025
1026 #----------------------------------------------------------------------------
1026 #----------------------------------------------------------------------------
1027 class SList(list):
1027 class SList(list):
1028 """List derivative with a special access attributes.
1028 """List derivative with a special access attributes.
1029
1029
1030 These are normal lists, but with the special attributes:
1030 These are normal lists, but with the special attributes:
1031
1031
1032 .l (or .list) : value as list (the list itself).
1032 .l (or .list) : value as list (the list itself).
1033 .n (or .nlstr): value as a string, joined on newlines.
1033 .n (or .nlstr): value as a string, joined on newlines.
1034 .s (or .spstr): value as a string, joined on spaces.
1034 .s (or .spstr): value as a string, joined on spaces.
1035 .p (or .paths): list of path objects
1035 .p (or .paths): list of path objects
1036
1036
1037 Any values which require transformations are computed only once and
1037 Any values which require transformations are computed only once and
1038 cached."""
1038 cached."""
1039
1039
1040 def get_list(self):
1040 def get_list(self):
1041 return self
1041 return self
1042
1042
1043 l = list = property(get_list)
1043 l = list = property(get_list)
1044
1044
1045 def get_spstr(self):
1045 def get_spstr(self):
1046 try:
1046 try:
1047 return self.__spstr
1047 return self.__spstr
1048 except AttributeError:
1048 except AttributeError:
1049 self.__spstr = ' '.join(self)
1049 self.__spstr = ' '.join(self)
1050 return self.__spstr
1050 return self.__spstr
1051
1051
1052 s = spstr = property(get_spstr)
1052 s = spstr = property(get_spstr)
1053
1053
1054 def get_nlstr(self):
1054 def get_nlstr(self):
1055 try:
1055 try:
1056 return self.__nlstr
1056 return self.__nlstr
1057 except AttributeError:
1057 except AttributeError:
1058 self.__nlstr = '\n'.join(self)
1058 self.__nlstr = '\n'.join(self)
1059 return self.__nlstr
1059 return self.__nlstr
1060
1060
1061 n = nlstr = property(get_nlstr)
1061 n = nlstr = property(get_nlstr)
1062
1062
1063 def get_paths(self):
1063 def get_paths(self):
1064 try:
1064 try:
1065 return self.__paths
1065 return self.__paths
1066 except AttributeError:
1066 except AttributeError:
1067 self.__paths = [path(p) for p in self if os.path.exists(p)]
1067 self.__paths = [path(p) for p in self if os.path.exists(p)]
1068 return self.__paths
1068 return self.__paths
1069
1069
1070 p = paths = property(get_paths)
1070 p = paths = property(get_paths)
1071
1071
1072 def grep(self, pattern, prune = False, field = None):
1072 def grep(self, pattern, prune = False, field = None):
1073 """ Return all strings matching 'pattern' (a regex or callable)
1073 """ Return all strings matching 'pattern' (a regex or callable)
1074
1074
1075 This is case-insensitive. If prune is true, return all items
1075 This is case-insensitive. If prune is true, return all items
1076 NOT matching the pattern.
1076 NOT matching the pattern.
1077
1077
1078 If field is specified, the match must occur in the specified
1078 If field is specified, the match must occur in the specified
1079 whitespace-separated field.
1079 whitespace-separated field.
1080
1080
1081 Examples::
1081 Examples::
1082
1082
1083 a.grep( lambda x: x.startswith('C') )
1083 a.grep( lambda x: x.startswith('C') )
1084 a.grep('Cha.*log', prune=1)
1084 a.grep('Cha.*log', prune=1)
1085 a.grep('chm', field=-1)
1085 a.grep('chm', field=-1)
1086 """
1086 """
1087
1087
1088 def match_target(s):
1088 def match_target(s):
1089 if field is None:
1089 if field is None:
1090 return s
1090 return s
1091 parts = s.split()
1091 parts = s.split()
1092 try:
1092 try:
1093 tgt = parts[field]
1093 tgt = parts[field]
1094 return tgt
1094 return tgt
1095 except IndexError:
1095 except IndexError:
1096 return ""
1096 return ""
1097
1097
1098 if isinstance(pattern, basestring):
1098 if isinstance(pattern, basestring):
1099 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
1099 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
1100 else:
1100 else:
1101 pred = pattern
1101 pred = pattern
1102 if not prune:
1102 if not prune:
1103 return SList([el for el in self if pred(match_target(el))])
1103 return SList([el for el in self if pred(match_target(el))])
1104 else:
1104 else:
1105 return SList([el for el in self if not pred(match_target(el))])
1105 return SList([el for el in self if not pred(match_target(el))])
1106 def fields(self, *fields):
1106 def fields(self, *fields):
1107 """ Collect whitespace-separated fields from string list
1107 """ Collect whitespace-separated fields from string list
1108
1108
1109 Allows quick awk-like usage of string lists.
1109 Allows quick awk-like usage of string lists.
1110
1110
1111 Example data (in var a, created by 'a = !ls -l')::
1111 Example data (in var a, created by 'a = !ls -l')::
1112 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
1112 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
1113 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
1113 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
1114
1114
1115 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
1115 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
1116 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
1116 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
1117 (note the joining by space).
1117 (note the joining by space).
1118 a.fields(-1) is ['ChangeLog', 'IPython']
1118 a.fields(-1) is ['ChangeLog', 'IPython']
1119
1119
1120 IndexErrors are ignored.
1120 IndexErrors are ignored.
1121
1121
1122 Without args, fields() just split()'s the strings.
1122 Without args, fields() just split()'s the strings.
1123 """
1123 """
1124 if len(fields) == 0:
1124 if len(fields) == 0:
1125 return [el.split() for el in self]
1125 return [el.split() for el in self]
1126
1126
1127 res = SList()
1127 res = SList()
1128 for el in [f.split() for f in self]:
1128 for el in [f.split() for f in self]:
1129 lineparts = []
1129 lineparts = []
1130
1130
1131 for fd in fields:
1131 for fd in fields:
1132 try:
1132 try:
1133 lineparts.append(el[fd])
1133 lineparts.append(el[fd])
1134 except IndexError:
1134 except IndexError:
1135 pass
1135 pass
1136 if lineparts:
1136 if lineparts:
1137 res.append(" ".join(lineparts))
1137 res.append(" ".join(lineparts))
1138
1138
1139 return res
1139 return res
1140 def sort(self, field=None, nums=False):
1141     """ sort by specified fields (see fields())
1142 
1143     Example::
1144         a.sort(1, nums = True)
1145 
1146     Sorts a by second field, in numerical order (so that 21 > 3)
1147 
1148     """
1149 
1150     #decorate, sort, undecorate
1151     if field is not None:
1152         dsu = [[SList([line]).fields(field), line] for line in self]
1153     else:
1154         dsu = [[line, line] for line in self]
1155     if nums:
1156         for i in range(len(dsu)):
1157             numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
1158             try:
1159                 n = int(numstr)
1160             except ValueError:
1161                 n = 0
1162             dsu[i][0] = n
1163 
1164 
1165     dsu.sort()
1166     return SList([t[1] for t in dsu])
1167 
1145 def print_slist(arg):
1168 def print_slist(arg):
1146 """ Prettier (non-repr-like) and more informative printer for SList """
1169 """ Prettier (non-repr-like) and more informative printer for SList """
1147 print "SList (.p, .n, .l, .s, .grep(), .fields() available). Value:"
1170 print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
1171 if hasattr(arg, 'hideonce') and arg.hideonce:
1172 arg.hideonce = False
1173 return
1174
1148 nlprint(arg)
1175 nlprint(arg)
1149
1176
1150 print_slist = result_display.when_type(SList)(print_slist)
1177 print_slist = result_display.when_type(SList)(print_slist)
1151
1178
1152
1179
1153
1180
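# --- Editor's illustrative sketch, not part of the original module: typical
# awk-like use of SList.grep(), .fields() and the new .sort(). The sample
# lines mimic 'ls -l' output and are made up.
def _demo_slist():
    a = SList(['-rw-r--r-- 1 ville None 18 Dec 14 2006 ChangeLog',
               'drwxr-xr-x 6 ville None  0 Oct 24 18:05 IPython'])
    print a.grep('changelog')        # case-insensitive regex match
    print a.fields(-1)               # last whitespace-separated field
    print a.sort(4, nums=True)       # sort on the size column, numerically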
1154 #----------------------------------------------------------------------------
1181 #----------------------------------------------------------------------------
1155 def esc_quotes(strng):
1182 def esc_quotes(strng):
1156 """Return the input string with single and double quotes escaped out"""
1183 """Return the input string with single and double quotes escaped out"""
1157
1184
1158 return strng.replace('"','\\"').replace("'","\\'")
1185 return strng.replace('"','\\"').replace("'","\\'")
1159
1186
1160 #----------------------------------------------------------------------------
1187 #----------------------------------------------------------------------------
1161 def make_quoted_expr(s):
1188 def make_quoted_expr(s):
1162 """Return string s in appropriate quotes, using raw string if possible.
1189 """Return string s in appropriate quotes, using raw string if possible.
1163
1190
1164 Effectively this turns string: cd \ao\ao\
1191 Effectively this turns string: cd \ao\ao\
1165 to: r"cd \ao\ao\_"[:-1]
1192 to: r"cd \ao\ao\_"[:-1]
1166
1193
1167 Note the use of raw string and padding at the end to allow trailing backslash.
1194 Note the use of raw string and padding at the end to allow trailing backslash.
1168
1195
1169 """
1196 """
1170
1197
1171 tail = ''
1198 tail = ''
1172 tailpadding = ''
1199 tailpadding = ''
1173 raw = ''
1200 raw = ''
1174 if "\\" in s:
1201 if "\\" in s:
1175 raw = 'r'
1202 raw = 'r'
1176 if s.endswith('\\'):
1203 if s.endswith('\\'):
1177 tail = '[:-1]'
1204 tail = '[:-1]'
1178 tailpadding = '_'
1205 tailpadding = '_'
1179 if '"' not in s:
1206 if '"' not in s:
1180 quote = '"'
1207 quote = '"'
1181 elif "'" not in s:
1208 elif "'" not in s:
1182 quote = "'"
1209 quote = "'"
1183 elif '"""' not in s and not s.endswith('"'):
1210 elif '"""' not in s and not s.endswith('"'):
1184 quote = '"""'
1211 quote = '"""'
1185 elif "'''" not in s and not s.endswith("'"):
1212 elif "'''" not in s and not s.endswith("'"):
1186 quote = "'''"
1213 quote = "'''"
1187 else:
1214 else:
1188 # give up, backslash-escaped string will do
1215 # give up, backslash-escaped string will do
1189 return '"%s"' % esc_quotes(s)
1216 return '"%s"' % esc_quotes(s)
1190 res = raw + quote + s + tailpadding + quote + tail
1217 res = raw + quote + s + tailpadding + quote + tail
1191 return res
1218 return res
1192
1219
1193
1220
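# --- Editor's illustrative sketch, not part of the original module: how
# make_quoted_expr() chooses its quoting. The input strings are made up.
def _demo_make_quoted_expr():
    print make_quoted_expr('cd /tmp')        # plain double quotes
    print make_quoted_expr('cd c:\\ao\\')    # raw string plus the [:-1] padding trick
    print make_quoted_expr('say "hi"')       # switches to single quotes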
1194 #----------------------------------------------------------------------------
1221 #----------------------------------------------------------------------------
1195 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
1222 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
1196 """Take multiple lines of input.
1223 """Take multiple lines of input.
1197
1224
1198 A list with each line of input as a separate element is returned when a
1225 A list with each line of input as a separate element is returned when a
1199 termination string is entered (defaults to a single '.'). Input can also
1226 termination string is entered (defaults to a single '.'). Input can also
1200 terminate via EOF (^D in Unix, ^Z-RET in Windows).
1227 terminate via EOF (^D in Unix, ^Z-RET in Windows).
1201
1228
1202 Lines of input which end in \\ are joined into single entries (and a
1229 Lines of input which end in \\ are joined into single entries (and a
1203 secondary continuation prompt is issued as long as the user terminates
1230 secondary continuation prompt is issued as long as the user terminates
1204 lines with \\). This allows entering very long strings which are still
1231 lines with \\). This allows entering very long strings which are still
1205 meant to be treated as single entities.
1232 meant to be treated as single entities.
1206 """
1233 """
1207
1234
1208 try:
1235 try:
1209 if header:
1236 if header:
1210 header += '\n'
1237 header += '\n'
1211 lines = [raw_input(header + ps1)]
1238 lines = [raw_input(header + ps1)]
1212 except EOFError:
1239 except EOFError:
1213 return []
1240 return []
1214 terminate = [terminate_str]
1241 terminate = [terminate_str]
1215 try:
1242 try:
1216 while lines[-1:] != terminate:
1243 while lines[-1:] != terminate:
1217 new_line = raw_input(ps1)
1244 new_line = raw_input(ps1)
1218 while new_line.endswith('\\'):
1245 while new_line.endswith('\\'):
1219 new_line = new_line[:-1] + raw_input(ps2)
1246 new_line = new_line[:-1] + raw_input(ps2)
1220 lines.append(new_line)
1247 lines.append(new_line)
1221
1248
1222 return lines[:-1] # don't return the termination command
1249 return lines[:-1] # don't return the termination command
1223 except EOFError:
1250 except EOFError:
1224 print
1251 print
1225 return lines
1252 return lines
1226
1253
1227 #----------------------------------------------------------------------------
1254 #----------------------------------------------------------------------------
1228 def raw_input_ext(prompt='', ps2='... '):
1255 def raw_input_ext(prompt='', ps2='... '):
1229 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
1256 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
1230
1257
1231 line = raw_input(prompt)
1258 line = raw_input(prompt)
1232 while line.endswith('\\'):
1259 while line.endswith('\\'):
1233 line = line[:-1] + raw_input(ps2)
1260 line = line[:-1] + raw_input(ps2)
1234 return line
1261 return line
1235
1262
1236 #----------------------------------------------------------------------------
1263 #----------------------------------------------------------------------------
1237 def ask_yes_no(prompt,default=None):
1264 def ask_yes_no(prompt,default=None):
1238 """Asks a question and returns a boolean (y/n) answer.
1265 """Asks a question and returns a boolean (y/n) answer.
1239
1266
1240 If default is given (one of 'y','n'), it is used if the user input is
1267 If default is given (one of 'y','n'), it is used if the user input is
1241 empty. Otherwise the question is repeated until an answer is given.
1268 empty. Otherwise the question is repeated until an answer is given.
1242
1269
1243 An EOF is treated as the default answer. If there is no default, an
1270 An EOF is treated as the default answer. If there is no default, an
1244 exception is raised to prevent infinite loops.
1271 exception is raised to prevent infinite loops.
1245
1272
1246 Valid answers are: y/yes/n/no (match is not case sensitive)."""
1273 Valid answers are: y/yes/n/no (match is not case sensitive)."""
1247
1274
1248 answers = {'y':True,'n':False,'yes':True,'no':False}
1275 answers = {'y':True,'n':False,'yes':True,'no':False}
1249 ans = None
1276 ans = None
1250 while ans not in answers.keys():
1277 while ans not in answers.keys():
1251 try:
1278 try:
1252 ans = raw_input(prompt+' ').lower()
1279 ans = raw_input(prompt+' ').lower()
1253 if not ans: # response was an empty string
1280 if not ans: # response was an empty string
1254 ans = default
1281 ans = default
1255 except KeyboardInterrupt:
1282 except KeyboardInterrupt:
1256 pass
1283 pass
1257 except EOFError:
1284 except EOFError:
1258 if default in answers.keys():
1285 if default in answers.keys():
1259 ans = default
1286 ans = default
1260 print
1287 print
1261 else:
1288 else:
1262 raise
1289 raise
1263
1290
1264 return answers[ans]
1291 return answers[ans]
1265
1292
1266 #----------------------------------------------------------------------------
1293 #----------------------------------------------------------------------------
1267 def marquee(txt='',width=78,mark='*'):
1294 def marquee(txt='',width=78,mark='*'):
1268 """Return the input string centered in a 'marquee'."""
1295 """Return the input string centered in a 'marquee'."""
1269 if not txt:
1296 if not txt:
1270 return (mark*width)[:width]
1297 return (mark*width)[:width]
1271 nmark = (width-len(txt)-2)/len(mark)/2
1298 nmark = (width-len(txt)-2)/len(mark)/2
1272 if nmark < 0: nmark =0
1299 if nmark < 0: nmark =0
1273 marks = mark*nmark
1300 marks = mark*nmark
1274 return '%s %s %s' % (marks,txt,marks)
1301 return '%s %s %s' % (marks,txt,marks)
1275
1302
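# --- Editor's illustrative sketch, not part of the original module: marquee()
# centers text inside a line of marker characters.
def _demo_marquee():
    print marquee('Results', width=30)    # ********** Results **********
    print marquee(width=30)               # a bare 30-character rule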
1276 #----------------------------------------------------------------------------
1303 #----------------------------------------------------------------------------
1277 class EvalDict:
1304 class EvalDict:
1278 """
1305 """
1279 Emulate a dict which evaluates its contents in the caller's frame.
1306 Emulate a dict which evaluates its contents in the caller's frame.
1280
1307
1281 Usage:
1308 Usage:
1282 >>> number = 19
1283 >>> text = "python"
1284 >>> print "%(text.capitalize())s %(number/9.0).1f rules!" % EvalDict()
1285 """
1312 """
1286
1313
1287 # This version is due to sismex01@hebmex.com on c.l.py, and is basically a
1314 # This version is due to sismex01@hebmex.com on c.l.py, and is basically a
1288 # modified (shorter) version of:
1315 # modified (shorter) version of:
1289 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66018 by
1316 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66018 by
1290 # Skip Montanaro (skip@pobox.com).
1317 # Skip Montanaro (skip@pobox.com).
1291
1318
1292 def __getitem__(self, name):
1319 def __getitem__(self, name):
1293 frame = sys._getframe(1)
1320 frame = sys._getframe(1)
1294 return eval(name, frame.f_globals, frame.f_locals)
1321 return eval(name, frame.f_globals, frame.f_locals)
1295
1322
1296 EvalString = EvalDict # for backwards compatibility
1323 EvalString = EvalDict # for backwards compatibility
1297 #----------------------------------------------------------------------------
1324 #----------------------------------------------------------------------------
1298 def qw(words,flat=0,sep=None,maxsplit=-1):
1325 def qw(words,flat=0,sep=None,maxsplit=-1):
1299 """Similar to Perl's qw() operator, but with some more options.
1326 """Similar to Perl's qw() operator, but with some more options.
1300
1327
1301 qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
1328 qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
1302
1329
1303 words can also be a list itself, and with flat=1, the output will be
1330 words can also be a list itself, and with flat=1, the output will be
1304 recursively flattened. Examples:
1331 recursively flattened. Examples:
1305
1332
1306 >>> qw('1 2')
1333 >>> qw('1 2')
1307 ['1', '2']
1334 ['1', '2']
1308 >>> qw(['a b','1 2',['m n','p q']])
1335 >>> qw(['a b','1 2',['m n','p q']])
1309 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
1336 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
1310 >>> qw(['a b','1 2',['m n','p q']],flat=1)
1337 >>> qw(['a b','1 2',['m n','p q']],flat=1)
1311 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q'] """
1338 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q'] """
1312
1339
1313 if type(words) in StringTypes:
1340 if type(words) in StringTypes:
1314 return [word.strip() for word in words.split(sep,maxsplit)
1341 return [word.strip() for word in words.split(sep,maxsplit)
1315 if word and not word.isspace() ]
1342 if word and not word.isspace() ]
1316 if flat:
1343 if flat:
1317 return flatten(map(qw,words,[1]*len(words)))
1344 return flatten(map(qw,words,[1]*len(words)))
1318 return map(qw,words)
1345 return map(qw,words)
1319
1346
1320 #----------------------------------------------------------------------------
1347 #----------------------------------------------------------------------------
1321 def qwflat(words,sep=None,maxsplit=-1):
1348 def qwflat(words,sep=None,maxsplit=-1):
1322 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
1349 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
1323 return qw(words,1,sep,maxsplit)
1350 return qw(words,1,sep,maxsplit)
1324
1351
1325 #----------------------------------------------------------------------------
1352 #----------------------------------------------------------------------------
1326 def qw_lol(indata):
1353 def qw_lol(indata):
1327 """qw_lol('a b') -> [['a','b']],
1354 """qw_lol('a b') -> [['a','b']],
1328 otherwise it's just a call to qw().
1355 otherwise it's just a call to qw().
1329
1356
1330 We need this to make sure the modules_some keys *always* end up as a
1357 We need this to make sure the modules_some keys *always* end up as a
1331 list of lists."""
1358 list of lists."""
1332
1359
1333 if type(indata) in StringTypes:
1360 if type(indata) in StringTypes:
1334 return [qw(indata)]
1361 return [qw(indata)]
1335 else:
1362 else:
1336 return qw(indata)
1363 return qw(indata)
1337
1364
1338 #-----------------------------------------------------------------------------
1365 #-----------------------------------------------------------------------------
1339 def list_strings(arg):
1366 def list_strings(arg):
1340 """Always return a list of strings, given a string or list of strings
1367 """Always return a list of strings, given a string or list of strings
1341 as input."""
1368 as input."""
1342
1369
1343 if type(arg) in StringTypes: return [arg]
1370 if type(arg) in StringTypes: return [arg]
1344 else: return arg
1371 else: return arg
1345
1372
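A short illustration of how the three word-list helpers above normalize their input (values are illustrative):

    >>> qwflat(['a b', '1 2'])
    ['a', 'b', '1', '2']
    >>> qw_lol('a b')              # a bare string still yields a list of lists
    [['a', 'b']]
    >>> list_strings('abc')
    ['abc']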
1346 #----------------------------------------------------------------------------
1373 #----------------------------------------------------------------------------
1347 def grep(pat,list,case=1):
1374 def grep(pat,list,case=1):
1348 """Simple minded grep-like function.
1375 """Simple minded grep-like function.
1349 grep(pat,list) returns occurrences of pat in list, None on failure.
1376 grep(pat,list) returns occurrences of pat in list, None on failure.
1350
1377
1351 It only does simple string matching, with no support for regexps. Use the
1378 It only does simple string matching, with no support for regexps. Use the
1352 option case=0 for case-insensitive matching."""
1379 option case=0 for case-insensitive matching."""
1353
1380
1354 # This is pretty crude. At least it should implement copying only references
1381 # This is pretty crude. At least it should implement copying only references
1355 # to the original data in case it's big. Now it copies the data for output.
1382 # to the original data in case it's big. Now it copies the data for output.
1356 out=[]
1383 out=[]
1357 if case:
1384 if case:
1358 for term in list:
1385 for term in list:
1359 if term.find(pat)>-1: out.append(term)
1386 if term.find(pat)>-1: out.append(term)
1360 else:
1387 else:
1361 lpat=pat.lower()
1388 lpat=pat.lower()
1362 for term in list:
1389 for term in list:
1363 if term.lower().find(lpat)>-1: out.append(term)
1390 if term.lower().find(lpat)>-1: out.append(term)
1364
1391
1365 if len(out): return out
1392 if len(out): return out
1366 else: return None
1393 else: return None
1367
1394
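A hedged example of grep() on a list of strings (plain substring matching, as noted in the docstring):

    >>> grep('Error', ['IOError: x', 'ok', 'KeyError: y'])
    ['IOError: x', 'KeyError: y']
    >>> grep('ERROR', ['IOError: x'], case=0)      # case-insensitive match
    ['IOError: x']
    >>> grep('zzz', ['abc'])                       # no hits -> None, not []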
1368 #----------------------------------------------------------------------------
1395 #----------------------------------------------------------------------------
1369 def dgrep(pat,*opts):
1396 def dgrep(pat,*opts):
1370 """Return grep() on dir()+dir(__builtins__).
1397 """Return grep() on dir()+dir(__builtins__).
1371
1398
1372 A very common use of grep() when working interactively."""
1399 A very common use of grep() when working interactively."""
1373
1400
1374 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
1401 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
1375
1402
1376 #----------------------------------------------------------------------------
1403 #----------------------------------------------------------------------------
1377 def idgrep(pat):
1404 def idgrep(pat):
1378 """Case-insensitive dgrep()"""
1405 """Case-insensitive dgrep()"""
1379
1406
1380 return dgrep(pat,0)
1407 return dgrep(pat,0)
1381
1408
1382 #----------------------------------------------------------------------------
1409 #----------------------------------------------------------------------------
1383 def igrep(pat,list):
1410 def igrep(pat,list):
1384 """Synonym for case-insensitive grep."""
1411 """Synonym for case-insensitive grep."""
1385
1412
1386 return grep(pat,list,case=0)
1413 return grep(pat,list,case=0)
1387
1414
1388 #----------------------------------------------------------------------------
1415 #----------------------------------------------------------------------------
1389 def indent(str,nspaces=4,ntabs=0):
1416 def indent(str,nspaces=4,ntabs=0):
1390 """Indent a string a given number of spaces or tabstops.
1417 """Indent a string a given number of spaces or tabstops.
1391
1418
1392 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
1419 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
1393 """
1420 """
1394 if str is None:
1421 if str is None:
1395 return
1422 return
1396 ind = '\t'*ntabs+' '*nspaces
1423 ind = '\t'*ntabs+' '*nspaces
1397 outstr = '%s%s' % (ind,str.replace(os.linesep,os.linesep+ind))
1424 outstr = '%s%s' % (ind,str.replace(os.linesep,os.linesep+ind))
1398 if outstr.endswith(os.linesep+ind):
1425 if outstr.endswith(os.linesep+ind):
1399 return outstr[:-len(ind)]
1426 return outstr[:-len(ind)]
1400 else:
1427 else:
1401 return outstr
1428 return outstr
1402
1429
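Illustrative use of indent(); the printed output assumes os.linesep is '\n' on the platform running the example:

    >>> print indent('alpha' + os.linesep + 'beta', nspaces=2)
      alpha
      beta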
1403 #-----------------------------------------------------------------------------
1430 #-----------------------------------------------------------------------------
1404 def native_line_ends(filename,backup=1):
1431 def native_line_ends(filename,backup=1):
1405 """Convert (in-place) a file to line-ends native to the current OS.
1432 """Convert (in-place) a file to line-ends native to the current OS.
1406
1433
1407 If the optional backup argument is given as false, no backup of the
1434 If the optional backup argument is given as false, no backup of the
1408 original file is left. """
1435 original file is left. """
1409
1436
1410 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
1437 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
1411
1438
1412 bak_filename = filename + backup_suffixes[os.name]
1439 bak_filename = filename + backup_suffixes[os.name]
1413
1440
1414 original = open(filename).read()
1441 original = open(filename).read()
1415 shutil.copy2(filename,bak_filename)
1442 shutil.copy2(filename,bak_filename)
1416 try:
1443 try:
1417 new = open(filename,'wb')
1444 new = open(filename,'wb')
1418 new.write(os.linesep.join(original.splitlines()))
1445 new.write(os.linesep.join(original.splitlines()))
1419 new.write(os.linesep) # ALWAYS put an eol at the end of the file
1446 new.write(os.linesep) # ALWAYS put an eol at the end of the file
1420 new.close()
1447 new.close()
1421 except:
1448 except:
1422 os.rename(bak_filename,filename)
1449 os.rename(bak_filename,filename)
1423 if not backup:
1450 if not backup:
1424 try:
1451 try:
1425 os.remove(bak_filename)
1452 os.remove(bak_filename)
1426 except:
1453 except:
1427 pass
1454 pass
1428
1455
1429 #----------------------------------------------------------------------------
1456 #----------------------------------------------------------------------------
1430 def get_pager_cmd(pager_cmd = None):
1457 def get_pager_cmd(pager_cmd = None):
1431 """Return a pager command.
1458 """Return a pager command.
1432
1459
1433 Makes some attempts at finding an OS-correct one."""
1460 Makes some attempts at finding an OS-correct one."""
1434
1461
1435 if os.name == 'posix':
1462 if os.name == 'posix':
1436 default_pager_cmd = 'less -r' # -r for color control sequences
1463 default_pager_cmd = 'less -r' # -r for color control sequences
1437 elif os.name in ['nt','dos']:
1464 elif os.name in ['nt','dos']:
1438 default_pager_cmd = 'type'
1465 default_pager_cmd = 'type'
1439
1466
1440 if pager_cmd is None:
1467 if pager_cmd is None:
1441 try:
1468 try:
1442 pager_cmd = os.environ['PAGER']
1469 pager_cmd = os.environ['PAGER']
1443 except:
1470 except:
1444 pager_cmd = default_pager_cmd
1471 pager_cmd = default_pager_cmd
1445 return pager_cmd
1472 return pager_cmd
1446
1473
1447 #-----------------------------------------------------------------------------
1474 #-----------------------------------------------------------------------------
1448 def get_pager_start(pager,start):
1475 def get_pager_start(pager,start):
1449 """Return the string for paging files with an offset.
1476 """Return the string for paging files with an offset.
1450
1477
1451 This is the '+N' argument which less and more (under Unix) accept.
1478 This is the '+N' argument which less and more (under Unix) accept.
1452 """
1479 """
1453
1480
1454 if pager in ['less','more']:
1481 if pager in ['less','more']:
1455 if start:
1482 if start:
1456 start_string = '+' + str(start)
1483 start_string = '+' + str(start)
1457 else:
1484 else:
1458 start_string = ''
1485 start_string = ''
1459 else:
1486 else:
1460 start_string = ''
1487 start_string = ''
1461 return start_string
1488 return start_string
1462
1489
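A small sketch of how the two pager helpers combine; the command actually chosen depends on $PAGER and the OS:

    >>> pager = get_pager_cmd()          # e.g. 'less -r' on posix when $PAGER is unset
    >>> get_pager_start('less', 40)      # '+N' offset understood by less/more
    '+40'
    >>> get_pager_start('type', 40)      # other pagers get no offset flag
    ''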
1463 #----------------------------------------------------------------------------
1490 #----------------------------------------------------------------------------
1464 # (X)emacs on W32 doesn't like to be bypassed with msvcrt.getch()
1491 # (X)emacs on W32 doesn't like to be bypassed with msvcrt.getch()
1465 if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
1492 if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
1466 import msvcrt
1493 import msvcrt
1467 def page_more():
1494 def page_more():
1468 """ Smart pausing between pages
1495 """ Smart pausing between pages
1469
1496
1470 @return: True if more lines should be printed, False if the user chose to quit
1497 @return: True if more lines should be printed, False if the user chose to quit
1471 """
1498 """
1472 Term.cout.write('---Return to continue, q to quit--- ')
1499 Term.cout.write('---Return to continue, q to quit--- ')
1473 ans = msvcrt.getch()
1500 ans = msvcrt.getch()
1474 if ans in ("q", "Q"):
1501 if ans in ("q", "Q"):
1475 result = False
1502 result = False
1476 else:
1503 else:
1477 result = True
1504 result = True
1478 Term.cout.write("\b"*37 + " "*37 + "\b"*37)
1505 Term.cout.write("\b"*37 + " "*37 + "\b"*37)
1479 return result
1506 return result
1480 else:
1507 else:
1481 def page_more():
1508 def page_more():
1482 ans = raw_input('---Return to continue, q to quit--- ')
1509 ans = raw_input('---Return to continue, q to quit--- ')
1483 if ans.lower().startswith('q'):
1510 if ans.lower().startswith('q'):
1484 return False
1511 return False
1485 else:
1512 else:
1486 return True
1513 return True
1487
1514
1488 esc_re = re.compile(r"(\x1b[^m]+m)")
1515 esc_re = re.compile(r"(\x1b[^m]+m)")
1489
1516
1490 def page_dumb(strng,start=0,screen_lines=25):
1517 def page_dumb(strng,start=0,screen_lines=25):
1491 """Very dumb 'pager' in Python, for when nothing else works.
1518 """Very dumb 'pager' in Python, for when nothing else works.
1492
1519
1493 Only moves forward, same interface as page(), except for pager_cmd and
1520 Only moves forward, same interface as page(), except for pager_cmd and
1494 mode."""
1521 mode."""
1495
1522
1496 out_ln = strng.splitlines()[start:]
1523 out_ln = strng.splitlines()[start:]
1497 screens = chop(out_ln,screen_lines-1)
1524 screens = chop(out_ln,screen_lines-1)
1498 if len(screens) == 1:
1525 if len(screens) == 1:
1499 print >>Term.cout, os.linesep.join(screens[0])
1526 print >>Term.cout, os.linesep.join(screens[0])
1500 else:
1527 else:
1501 last_escape = ""
1528 last_escape = ""
1502 for scr in screens[0:-1]:
1529 for scr in screens[0:-1]:
1503 hunk = os.linesep.join(scr)
1530 hunk = os.linesep.join(scr)
1504 print >>Term.cout, last_escape + hunk
1531 print >>Term.cout, last_escape + hunk
1505 if not page_more():
1532 if not page_more():
1506 return
1533 return
1507 esc_list = esc_re.findall(hunk)
1534 esc_list = esc_re.findall(hunk)
1508 if len(esc_list) > 0:
1535 if len(esc_list) > 0:
1509 last_escape = esc_list[-1]
1536 last_escape = esc_list[-1]
1510 print >>Term.cout, last_escape + os.linesep.join(screens[-1])
1537 print >>Term.cout, last_escape + os.linesep.join(screens[-1])
1511
1538
1512 #----------------------------------------------------------------------------
1539 #----------------------------------------------------------------------------
1513 def page(strng,start=0,screen_lines=0,pager_cmd = None):
1540 def page(strng,start=0,screen_lines=0,pager_cmd = None):
1514 """Print a string, piping through a pager after a certain length.
1541 """Print a string, piping through a pager after a certain length.
1515
1542
1516 The screen_lines parameter specifies the number of *usable* lines of your
1543 The screen_lines parameter specifies the number of *usable* lines of your
1517 terminal screen (total lines minus lines you need to reserve to show other
1544 terminal screen (total lines minus lines you need to reserve to show other
1518 information).
1545 information).
1519
1546
1520 If you set screen_lines to a number <=0, page() will try to auto-determine
1547 If you set screen_lines to a number <=0, page() will try to auto-determine
1521 your screen size and will only use up to (screen_size+screen_lines) for
1548 your screen size and will only use up to (screen_size+screen_lines) for
1522 printing, paging after that. That is, if you want auto-detection but need
1549 printing, paging after that. That is, if you want auto-detection but need
1523 to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
1550 to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
1524 auto-detection without any lines reserved simply use screen_lines = 0.
1551 auto-detection without any lines reserved simply use screen_lines = 0.
1525
1552
1526 If a string won't fit in the allowed lines, it is sent through the
1553 If a string won't fit in the allowed lines, it is sent through the
1527 specified pager command. If none given, look for PAGER in the environment,
1554 specified pager command. If none given, look for PAGER in the environment,
1528 and ultimately default to less.
1555 and ultimately default to less.
1529
1556
1530 If no system pager works, the string is sent through a 'dumb pager'
1557 If no system pager works, the string is sent through a 'dumb pager'
1531 written in python, very simplistic.
1558 written in python, very simplistic.
1532 """
1559 """
1533
1560
1534 # Some routines may auto-compute start offsets incorrectly and pass a
1561 # Some routines may auto-compute start offsets incorrectly and pass a
1535 # negative value. Offset to 0 for robustness.
1562 # negative value. Offset to 0 for robustness.
1536 start = max(0,start)
1563 start = max(0,start)
1537
1564
1538 # first, try the hook
1565 # first, try the hook
1539 ip = IPython.ipapi.get()
1566 ip = IPython.ipapi.get()
1540 if ip:
1567 if ip:
1541 try:
1568 try:
1542 ip.IP.hooks.show_in_pager(strng)
1569 ip.IP.hooks.show_in_pager(strng)
1543 return
1570 return
1544 except IPython.ipapi.TryNext:
1571 except IPython.ipapi.TryNext:
1545 pass
1572 pass
1546
1573
1547 # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
1574 # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
1548 TERM = os.environ.get('TERM','dumb')
1575 TERM = os.environ.get('TERM','dumb')
1549 if TERM in ['dumb','emacs'] and os.name != 'nt':
1576 if TERM in ['dumb','emacs'] and os.name != 'nt':
1550 print strng
1577 print strng
1551 return
1578 return
1552 # chop off the topmost part of the string we don't want to see
1579 # chop off the topmost part of the string we don't want to see
1553 str_lines = strng.split(os.linesep)[start:]
1580 str_lines = strng.split(os.linesep)[start:]
1554 str_toprint = os.linesep.join(str_lines)
1581 str_toprint = os.linesep.join(str_lines)
1555 num_newlines = len(str_lines)
1582 num_newlines = len(str_lines)
1556 len_str = len(str_toprint)
1583 len_str = len(str_toprint)
1557
1584
1558 # Dumb heuristics to guesstimate number of on-screen lines the string
1585 # Dumb heuristics to guesstimate number of on-screen lines the string
1559 # takes. Very basic, but good enough for docstrings in reasonable
1586 # takes. Very basic, but good enough for docstrings in reasonable
1560 # terminals. If someone later feels like refining it, it's not hard.
1587 # terminals. If someone later feels like refining it, it's not hard.
1561 numlines = max(num_newlines,int(len_str/80)+1)
1588 numlines = max(num_newlines,int(len_str/80)+1)
1562
1589
1563 if os.name == "nt":
1590 if os.name == "nt":
1564 screen_lines_def = get_console_size(defaulty=25)[1]
1591 screen_lines_def = get_console_size(defaulty=25)[1]
1565 else:
1592 else:
1566 screen_lines_def = 25 # default value if we can't auto-determine
1593 screen_lines_def = 25 # default value if we can't auto-determine
1567
1594
1568 # auto-determine screen size
1595 # auto-determine screen size
1569 if screen_lines <= 0:
1596 if screen_lines <= 0:
1570 if TERM=='xterm':
1597 if TERM=='xterm':
1571 use_curses = USE_CURSES
1598 use_curses = USE_CURSES
1572 else:
1599 else:
1573 # curses causes problems on many terminals other than xterm.
1600 # curses causes problems on many terminals other than xterm.
1574 use_curses = False
1601 use_curses = False
1575 if use_curses:
1602 if use_curses:
1576 # There is a bug in curses, where *sometimes* it fails to properly
1603 # There is a bug in curses, where *sometimes* it fails to properly
1577 # initialize, and then after the endwin() call is made, the
1604 # initialize, and then after the endwin() call is made, the
1578 # terminal is left in an unusable state. Rather than trying to
1605 # terminal is left in an unusable state. Rather than trying to
1579 # check every time for this (by requesting and comparing termios
1606 # check every time for this (by requesting and comparing termios
1580 # flags each time), we just save the initial terminal state and
1607 # flags each time), we just save the initial terminal state and
1581 # unconditionally reset it every time. It's cheaper than making
1608 # unconditionally reset it every time. It's cheaper than making
1582 # the checks.
1609 # the checks.
1583 term_flags = termios.tcgetattr(sys.stdout)
1610 term_flags = termios.tcgetattr(sys.stdout)
1584 scr = curses.initscr()
1611 scr = curses.initscr()
1585 screen_lines_real,screen_cols = scr.getmaxyx()
1612 screen_lines_real,screen_cols = scr.getmaxyx()
1586 curses.endwin()
1613 curses.endwin()
1587 # Restore terminal state in case endwin() didn't.
1614 # Restore terminal state in case endwin() didn't.
1588 termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
1615 termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
1589 # Now we have what we needed: the screen size in rows/columns
1616 # Now we have what we needed: the screen size in rows/columns
1590 screen_lines += screen_lines_real
1617 screen_lines += screen_lines_real
1591 #print '***Screen size:',screen_lines_real,'lines x',\
1618 #print '***Screen size:',screen_lines_real,'lines x',\
1592 #screen_cols,'columns.' # dbg
1619 #screen_cols,'columns.' # dbg
1593 else:
1620 else:
1594 screen_lines += screen_lines_def
1621 screen_lines += screen_lines_def
1595
1622
1596 #print 'numlines',numlines,'screenlines',screen_lines # dbg
1623 #print 'numlines',numlines,'screenlines',screen_lines # dbg
1597 if numlines <= screen_lines :
1624 if numlines <= screen_lines :
1598 #print '*** normal print' # dbg
1625 #print '*** normal print' # dbg
1599 print >>Term.cout, str_toprint
1626 print >>Term.cout, str_toprint
1600 else:
1627 else:
1601 # Try to open pager and default to internal one if that fails.
1628 # Try to open pager and default to internal one if that fails.
1602 # All failure modes are tagged as 'retval=1', to match the return
1629 # All failure modes are tagged as 'retval=1', to match the return
1603 # value of a failed system command. If any intermediate attempt
1630 # value of a failed system command. If any intermediate attempt
1604 # sets retval to 1, at the end we resort to our own page_dumb() pager.
1631 # sets retval to 1, at the end we resort to our own page_dumb() pager.
1605 pager_cmd = get_pager_cmd(pager_cmd)
1632 pager_cmd = get_pager_cmd(pager_cmd)
1606 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1633 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1607 if os.name == 'nt':
1634 if os.name == 'nt':
1608 if pager_cmd.startswith('type'):
1635 if pager_cmd.startswith('type'):
1609 # The default WinXP 'type' command fails on complex strings.
1636 # The default WinXP 'type' command fails on complex strings.
1610 retval = 1
1637 retval = 1
1611 else:
1638 else:
1612 tmpname = tempfile.mktemp('.txt')
1639 tmpname = tempfile.mktemp('.txt')
1613 tmpfile = file(tmpname,'wt')
1640 tmpfile = file(tmpname,'wt')
1614 tmpfile.write(strng)
1641 tmpfile.write(strng)
1615 tmpfile.close()
1642 tmpfile.close()
1616 cmd = "%s < %s" % (pager_cmd,tmpname)
1643 cmd = "%s < %s" % (pager_cmd,tmpname)
1617 if os.system(cmd):
1644 if os.system(cmd):
1618 retval = 1
1645 retval = 1
1619 else:
1646 else:
1620 retval = None
1647 retval = None
1621 os.remove(tmpname)
1648 os.remove(tmpname)
1622 else:
1649 else:
1623 try:
1650 try:
1624 retval = None
1651 retval = None
1625 # if I use popen4, things hang. No idea why.
1652 # if I use popen4, things hang. No idea why.
1626 #pager,shell_out = os.popen4(pager_cmd)
1653 #pager,shell_out = os.popen4(pager_cmd)
1627 pager = os.popen(pager_cmd,'w')
1654 pager = os.popen(pager_cmd,'w')
1628 pager.write(strng)
1655 pager.write(strng)
1629 pager.close()
1656 pager.close()
1630 retval = pager.close() # success returns None
1657 retval = pager.close() # success returns None
1631 except IOError,msg: # broken pipe when user quits
1658 except IOError,msg: # broken pipe when user quits
1632 if msg.args == (32,'Broken pipe'):
1659 if msg.args == (32,'Broken pipe'):
1633 retval = None
1660 retval = None
1634 else:
1661 else:
1635 retval = 1
1662 retval = 1
1636 except OSError:
1663 except OSError:
1637 # Other strange problems, sometimes seen in Win2k/cygwin
1664 # Other strange problems, sometimes seen in Win2k/cygwin
1638 retval = 1
1665 retval = 1
1639 if retval is not None:
1666 if retval is not None:
1640 page_dumb(strng,screen_lines=screen_lines)
1667 page_dumb(strng,screen_lines=screen_lines)
1641
1668
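A hedged usage sketch of page(), matching the screen_lines conventions described in the docstring (the text is throwaway sample data):

    >>> text = os.linesep.join(['line %d' % i for i in range(200)])
    >>> page(text, screen_lines=-3)      # auto-detect height, keep 3 lines free
    >>> page(text, screen_lines=10)      # force paging after 10 usable lines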
1642 #----------------------------------------------------------------------------
1669 #----------------------------------------------------------------------------
1643 def page_file(fname,start = 0, pager_cmd = None):
1670 def page_file(fname,start = 0, pager_cmd = None):
1644 """Page a file, using an optional pager command and starting line.
1671 """Page a file, using an optional pager command and starting line.
1645 """
1672 """
1646
1673
1647 pager_cmd = get_pager_cmd(pager_cmd)
1674 pager_cmd = get_pager_cmd(pager_cmd)
1648 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1675 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1649
1676
1650 try:
1677 try:
1651 if os.environ['TERM'] in ['emacs','dumb']:
1678 if os.environ['TERM'] in ['emacs','dumb']:
1652 raise EnvironmentError
1679 raise EnvironmentError
1653 xsys(pager_cmd + ' ' + fname)
1680 xsys(pager_cmd + ' ' + fname)
1654 except:
1681 except:
1655 try:
1682 try:
1656 if start > 0:
1683 if start > 0:
1657 start -= 1
1684 start -= 1
1658 page(open(fname).read(),start)
1685 page(open(fname).read(),start)
1659 except:
1686 except:
1660 print 'Unable to show file',`fname`
1687 print 'Unable to show file',`fname`
1661
1688
1662
1689
1663 #----------------------------------------------------------------------------
1690 #----------------------------------------------------------------------------
1664 def snip_print(str,width = 75,print_full = 0,header = ''):
1691 def snip_print(str,width = 75,print_full = 0,header = ''):
1665 """Print a string snipping the midsection to fit in width.
1692 """Print a string snipping the midsection to fit in width.
1666
1693
1667 print_full: mode control:
1694 print_full: mode control:
1668 - 0: only snip long strings
1695 - 0: only snip long strings
1669 - 1: send to page() directly.
1696 - 1: send to page() directly.
1670 - 2: snip long strings and ask for full length viewing with page()
1697 - 2: snip long strings and ask for full length viewing with page()
1671 Return 1 if snipping was necessary, 0 otherwise."""
1698 Return 1 if snipping was necessary, 0 otherwise."""
1672
1699
1673 if print_full == 1:
1700 if print_full == 1:
1674 page(header+str)
1701 page(header+str)
1675 return 0
1702 return 0
1676
1703
1677 print header,
1704 print header,
1678 if len(str) < width:
1705 if len(str) < width:
1679 print str
1706 print str
1680 snip = 0
1707 snip = 0
1681 else:
1708 else:
1682 whalf = int((width -5)/2)
1709 whalf = int((width -5)/2)
1683 print str[:whalf] + ' <...> ' + str[-whalf:]
1710 print str[:whalf] + ' <...> ' + str[-whalf:]
1684 snip = 1
1711 snip = 1
1685 if snip and print_full == 2:
1712 if snip and print_full == 2:
1686 if raw_input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
1713 if raw_input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
1687 page(str)
1714 page(str)
1688 return snip
1715 return snip
1689
1716
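Illustrative calls to snip_print(); the return value flags whether snipping happened:

    >>> was_snipped = snip_print('x' * 200, width=60)    # prints 'xx... <...> ...xx', returns 1
    >>> snip_print('short text', print_full=1)            # mode 1: sent whole to page(), returns 0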
1690 #****************************************************************************
1717 #****************************************************************************
1691 # lists, dicts and structures
1718 # lists, dicts and structures
1692
1719
1693 def belong(candidates,checklist):
1720 def belong(candidates,checklist):
1694 """Check whether the items of a list appear in a given list of options.
1721 """Check whether the items of a list appear in a given list of options.
1695
1722
1696 Returns a list of 1 and 0, one for each candidate given."""
1723 Returns a list of 1 and 0, one for each candidate given."""
1697
1724
1698 return [x in checklist for x in candidates]
1725 return [x in checklist for x in candidates]
1699
1726
1700 #----------------------------------------------------------------------------
1727 #----------------------------------------------------------------------------
1701 def uniq_stable(elems):
1728 def uniq_stable(elems):
1702 """uniq_stable(elems) -> list
1729 """uniq_stable(elems) -> list
1703
1730
1704 Return from an iterable, a list of all the unique elements in the input,
1731 Return from an iterable, a list of all the unique elements in the input,
1705 but maintaining the order in which they first appear.
1732 but maintaining the order in which they first appear.
1706
1733
1707 A naive solution to this problem which just makes a dictionary with the
1734 A naive solution to this problem which just makes a dictionary with the
1708 elements as keys fails to respect the stability condition, since
1735 elements as keys fails to respect the stability condition, since
1709 dictionaries are unsorted by nature.
1736 dictionaries are unsorted by nature.
1710
1737
1711 Note: All elements in the input must be valid dictionary keys for this
1738 Note: All elements in the input must be valid dictionary keys for this
1712 routine to work, as it internally uses a dictionary for efficiency
1739 routine to work, as it internally uses a dictionary for efficiency
1713 reasons."""
1740 reasons."""
1714
1741
1715 unique = []
1742 unique = []
1716 unique_dict = {}
1743 unique_dict = {}
1717 for nn in elems:
1744 for nn in elems:
1718 if nn not in unique_dict:
1745 if nn not in unique_dict:
1719 unique.append(nn)
1746 unique.append(nn)
1720 unique_dict[nn] = None
1747 unique_dict[nn] = None
1721 return unique
1748 return unique
1722
1749
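Quick doctest-style examples of the two helpers above:

    >>> belong(['a', 'z'], ['a', 'b', 'c'])
    [True, False]
    >>> uniq_stable([1, 2, 1, 3, 2])
    [1, 2, 3]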
1723 #----------------------------------------------------------------------------
1750 #----------------------------------------------------------------------------
1724 class NLprinter:
1751 class NLprinter:
1725 """Print an arbitrarily nested list, indicating index numbers.
1752 """Print an arbitrarily nested list, indicating index numbers.
1726
1753
1727 An instance of this class called nlprint is available and callable as a
1754 An instance of this class called nlprint is available and callable as a
1728 function.
1755 function.
1729
1756
1730 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
1757 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
1731 and using 'sep' to separate the index from the value. """
1758 and using 'sep' to separate the index from the value. """
1732
1759
1733 def __init__(self):
1760 def __init__(self):
1734 self.depth = 0
1761 self.depth = 0
1735
1762
1736 def __call__(self,lst,pos='',**kw):
1763 def __call__(self,lst,pos='',**kw):
1737 """Prints the nested list numbering levels."""
1764 """Prints the nested list numbering levels."""
1738 kw.setdefault('indent',' ')
1765 kw.setdefault('indent',' ')
1739 kw.setdefault('sep',': ')
1766 kw.setdefault('sep',': ')
1740 kw.setdefault('start',0)
1767 kw.setdefault('start',0)
1741 kw.setdefault('stop',len(lst))
1768 kw.setdefault('stop',len(lst))
1742 # we need to remove start and stop from kw so they don't propagate
1769 # we need to remove start and stop from kw so they don't propagate
1743 # into a recursive call for a nested list.
1770 # into a recursive call for a nested list.
1744 start = kw['start']; del kw['start']
1771 start = kw['start']; del kw['start']
1745 stop = kw['stop']; del kw['stop']
1772 stop = kw['stop']; del kw['stop']
1746 if self.depth == 0 and 'header' in kw.keys():
1773 if self.depth == 0 and 'header' in kw.keys():
1747 print kw['header']
1774 print kw['header']
1748
1775
1749 for idx in range(start,stop):
1776 for idx in range(start,stop):
1750 elem = lst[idx]
1777 elem = lst[idx]
1751 if type(elem)==type([]):
1778 if type(elem)==type([]):
1752 self.depth += 1
1779 self.depth += 1
1753 self.__call__(elem,itpl('$pos$idx,'),**kw)
1780 self.__call__(elem,itpl('$pos$idx,'),**kw)
1754 self.depth -= 1
1781 self.depth -= 1
1755 else:
1782 else:
1756 printpl(kw['indent']*self.depth+'$pos$idx$kw["sep"]$elem')
1783 printpl(kw['indent']*self.depth+'$pos$idx$kw["sep"]$elem')
1757
1784
1758 nlprint = NLprinter()
1785 nlprint = NLprinter()
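A sketch of nlprint() on a nested list; the exact spacing comes from the 'indent' and 'sep' defaults above, so the output shown is approximate:

    >>> nlprint(['a', ['b', 'c']])
    0: a
     1,0: b
     1,1: c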
1759 #----------------------------------------------------------------------------
1786 #----------------------------------------------------------------------------
1760 """Check whether ALL the items of a list appear in a given list of options.
1787 """Check whether ALL the items of a list appear in a given list of options.
1761 """Check whether a list of items ALL appear in a given list of options.
1788 """Check whether a list of items ALL appear in a given list of options.
1762
1789
1763 Returns a single 1 or 0 value."""
1790 Returns a single 1 or 0 value."""
1764
1791
1765 return 1-(0 in [x in checklist for x in candidates])
1792 return 1-(0 in [x in checklist for x in candidates])
1766
1793
1767 #----------------------------------------------------------------------------
1794 #----------------------------------------------------------------------------
1768 def sort_compare(lst1,lst2,inplace = 1):
1795 def sort_compare(lst1,lst2,inplace = 1):
1769 """Sort and compare two lists.
1796 """Sort and compare two lists.
1770
1797
1771 By default it does it in place, thus modifying the lists. Use inplace = 0
1798 By default it does it in place, thus modifying the lists. Use inplace = 0
1772 to avoid that (at the cost of temporary copy creation)."""
1799 to avoid that (at the cost of temporary copy creation)."""
1773 if not inplace:
1800 if not inplace:
1774 lst1 = lst1[:]
1801 lst1 = lst1[:]
1775 lst2 = lst2[:]
1802 lst2 = lst2[:]
1776 lst1.sort(); lst2.sort()
1803 lst1.sort(); lst2.sort()
1777 return lst1 == lst2
1804 return lst1 == lst2
1778
1805
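Two small illustrations; note that sort_compare() sorts its arguments in place unless inplace=0:

    >>> all_belong(['a', 'b'], ['a', 'b', 'c'])
    1
    >>> lst = [3, 1, 2]
    >>> sort_compare(lst, [1, 2, 3], inplace=0)    # lst itself is left unsorted
    True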
1779 #----------------------------------------------------------------------------
1806 #----------------------------------------------------------------------------
1780 def mkdict(**kwargs):
1807 def mkdict(**kwargs):
1781 """Return a dict from a keyword list.
1808 """Return a dict from a keyword list.
1782
1809
1783 It's just syntactic sugar for making dictionary creation more convenient:
1810 It's just syntactic sugar for making dictionary creation more convenient:
1784 # the standard way
1811 # the standard way
1785 >>>data = { 'red' : 1, 'green' : 2, 'blue' : 3 }
1812 >>>data = { 'red' : 1, 'green' : 2, 'blue' : 3 }
1786 # a cleaner way
1813 # a cleaner way
1787 >>>data = dict(red=1, green=2, blue=3)
1814 >>>data = dict(red=1, green=2, blue=3)
1788
1815
1789 If you need more than this, look at the Struct() class."""
1816 If you need more than this, look at the Struct() class."""
1790
1817
1791 return kwargs
1818 return kwargs
1792
1819
1793 #----------------------------------------------------------------------------
1820 #----------------------------------------------------------------------------
1794 def list2dict(lst):
1821 def list2dict(lst):
1795 """Takes a list of (key,value) pairs and turns it into a dict."""
1822 """Takes a list of (key,value) pairs and turns it into a dict."""
1796
1823
1797 dic = {}
1824 dic = {}
1798 for k,v in lst: dic[k] = v
1825 for k,v in lst: dic[k] = v
1799 return dic
1826 return dic
1800
1827
1801 #----------------------------------------------------------------------------
1828 #----------------------------------------------------------------------------
1802 def list2dict2(lst,default=''):
1829 def list2dict2(lst,default=''):
1803 """Takes a list and turns it into a dict.
1830 """Takes a list and turns it into a dict.
1804 Much slower than list2dict, but more versatile. This version can take
1831 Much slower than list2dict, but more versatile. This version can take
1805 lists with sublists of arbitrary length (including scalars)."""
1832 lists with sublists of arbitrary length (including scalars)."""
1806
1833
1807 dic = {}
1834 dic = {}
1808 for elem in lst:
1835 for elem in lst:
1809 if type(elem) in (types.ListType,types.TupleType):
1836 if type(elem) in (types.ListType,types.TupleType):
1810 size = len(elem)
1837 size = len(elem)
1811 if size == 0:
1838 if size == 0:
1812 pass
1839 pass
1813 elif size == 1:
1840 elif size == 1:
1814 dic[elem] = default
1841 dic[elem] = default
1815 else:
1842 else:
1816 k,v = elem[0], elem[1:]
1843 k,v = elem[0], elem[1:]
1817 if len(v) == 1: v = v[0]
1844 if len(v) == 1: v = v[0]
1818 dic[k] = v
1845 dic[k] = v
1819 else:
1846 else:
1820 dic[elem] = default
1847 dic[elem] = default
1821 return dic
1848 return dic
1822
1849
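Examples of the two list-to-dict helpers; comparing against a literal dict sidesteps key-ordering differences:

    >>> list2dict([('a', 1), ('b', 2)]) == {'a': 1, 'b': 2}
    True
    >>> list2dict2(['x', ('k', 'v1', 'v2')]) == {'x': '', 'k': ('v1', 'v2')}
    True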
1823 #----------------------------------------------------------------------------
1850 #----------------------------------------------------------------------------
1824 def flatten(seq):
1851 def flatten(seq):
1825 """Flatten a list of lists (NOT recursive, only works for 2d lists)."""
1852 """Flatten a list of lists (NOT recursive, only works for 2d lists)."""
1826
1853
1827 return [x for subseq in seq for x in subseq]
1854 return [x for subseq in seq for x in subseq]
1828
1855
1829 #----------------------------------------------------------------------------
1856 #----------------------------------------------------------------------------
1830 def get_slice(seq,start=0,stop=None,step=1):
1857 def get_slice(seq,start=0,stop=None,step=1):
1831 """Get a slice of a sequence with variable step. Specify start,stop,step."""
1858 """Get a slice of a sequence with variable step. Specify start,stop,step."""
1832 if stop == None:
1859 if stop == None:
1833 stop = len(seq)
1860 stop = len(seq)
1834 item = lambda i: seq[i]
1861 item = lambda i: seq[i]
1835 return map(item,xrange(start,stop,step))
1862 return map(item,xrange(start,stop,step))
1836
1863
1837 #----------------------------------------------------------------------------
1864 #----------------------------------------------------------------------------
1838 def chop(seq,size):
1865 def chop(seq,size):
1839 """Chop a sequence into chunks of the given size."""
1866 """Chop a sequence into chunks of the given size."""
1840 chunk = lambda i: seq[i:i+size]
1867 chunk = lambda i: seq[i:i+size]
1841 return map(chunk,xrange(0,len(seq),size))
1868 return map(chunk,xrange(0,len(seq),size))
1842
1869
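The three small sequence helpers above, shown together (illustrative):

    >>> flatten([[1, 2], [3]])
    [1, 2, 3]
    >>> get_slice('abcdef', start=1, step=2)
    ['b', 'd', 'f']
    >>> chop(range(5), 2)
    [[0, 1], [2, 3], [4]]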
1843 #----------------------------------------------------------------------------
1870 #----------------------------------------------------------------------------
1844 # with is a keyword as of python 2.5, so this function is renamed to withobj
1871 # with is a keyword as of python 2.5, so this function is renamed to withobj
1845 # from its old 'with' name.
1872 # from its old 'with' name.
1846 def with_obj(object, **args):
1873 def with_obj(object, **args):
1847 """Set multiple attributes for an object, similar to Pascal's with.
1874 """Set multiple attributes for an object, similar to Pascal's with.
1848
1875
1849 Example:
1876 Example:
1850 with_obj(jim,
1877 with_obj(jim,
1851 born = 1960,
1878 born = 1960,
1852 haircolour = 'Brown',
1879 haircolour = 'Brown',
1853 eyecolour = 'Green')
1880 eyecolour = 'Green')
1854
1881
1855 Credit: Greg Ewing, in
1882 Credit: Greg Ewing, in
1856 http://mail.python.org/pipermail/python-list/2001-May/040703.html.
1883 http://mail.python.org/pipermail/python-list/2001-May/040703.html.
1857
1884
1858 NOTE: up until IPython 0.7.2, this was called simply 'with', but 'with'
1885 NOTE: up until IPython 0.7.2, this was called simply 'with', but 'with'
1859 has become a keyword for Python 2.5, so we had to rename it."""
1886 has become a keyword for Python 2.5, so we had to rename it."""
1860
1887
1861 object.__dict__.update(args)
1888 object.__dict__.update(args)
1862
1889
1863 #----------------------------------------------------------------------------
1890 #----------------------------------------------------------------------------
1864 def setattr_list(obj,alist,nspace = None):
1891 def setattr_list(obj,alist,nspace = None):
1865 """Set a list of attributes for an object taken from a namespace.
1892 """Set a list of attributes for an object taken from a namespace.
1866
1893
1867 setattr_list(obj,alist,nspace) -> sets in obj all the attributes listed in
1894 setattr_list(obj,alist,nspace) -> sets in obj all the attributes listed in
1868 alist with their values taken from nspace, which must be a dict (something
1895 alist with their values taken from nspace, which must be a dict (something
1869 like locals() will often do). If nspace isn't given, locals() of the
1896 like locals() will often do). If nspace isn't given, locals() of the
1870 *caller* is used, so in most cases you can omit it.
1897 *caller* is used, so in most cases you can omit it.
1871
1898
1872 Note that alist can be given as a string, which will be automatically
1899 Note that alist can be given as a string, which will be automatically
1873 split into a list on whitespace. If given as a list, it must be a list of
1900 split into a list on whitespace. If given as a list, it must be a list of
1874 *strings* (the variable names themselves), not of variables."""
1901 *strings* (the variable names themselves), not of variables."""
1875
1902
1876 # this grabs the local variables from the *previous* call frame -- that is
1903 # this grabs the local variables from the *previous* call frame -- that is
1877 # the locals from the function that called setattr_list().
1904 # the locals from the function that called setattr_list().
1878 # - snipped from weave.inline()
1905 # - snipped from weave.inline()
1879 if nspace is None:
1906 if nspace is None:
1880 call_frame = sys._getframe().f_back
1907 call_frame = sys._getframe().f_back
1881 nspace = call_frame.f_locals
1908 nspace = call_frame.f_locals
1882
1909
1883 if type(alist) in StringTypes:
1910 if type(alist) in StringTypes:
1884 alist = alist.split()
1911 alist = alist.split()
1885 for attr in alist:
1912 for attr in alist:
1886 val = eval(attr,nspace)
1913 val = eval(attr,nspace)
1887 setattr(obj,attr,val)
1914 setattr(obj,attr,val)
1888
1915
1889 #----------------------------------------------------------------------------
1916 #----------------------------------------------------------------------------
1890 def getattr_list(obj,alist,*args):
1917 def getattr_list(obj,alist,*args):
1891 """getattr_list(obj,alist[, default]) -> attribute list.
1918 """getattr_list(obj,alist[, default]) -> attribute list.
1892
1919
1893 Get a list of named attributes for an object. When a default argument is
1920 Get a list of named attributes for an object. When a default argument is
1894 given, it is returned when the attribute doesn't exist; without it, an
1921 given, it is returned when the attribute doesn't exist; without it, an
1895 exception is raised in that case.
1922 exception is raised in that case.
1896
1923
1897 Note that alist can be given as a string, which will be automatically
1924 Note that alist can be given as a string, which will be automatically
1898 split into a list on whitespace. If given as a list, it must be a list of
1925 split into a list on whitespace. If given as a list, it must be a list of
1899 *strings* (the variable names themselves), not of variables."""
1926 *strings* (the variable names themselves), not of variables."""
1900
1927
1901 if type(alist) in StringTypes:
1928 if type(alist) in StringTypes:
1902 alist = alist.split()
1929 alist = alist.split()
1903 if args:
1930 if args:
1904 if len(args)==1:
1931 if len(args)==1:
1905 default = args[0]
1932 default = args[0]
1906 return map(lambda attr: getattr(obj,attr,default),alist)
1933 return map(lambda attr: getattr(obj,attr,default),alist)
1907 else:
1934 else:
1908 raise ValueError,'getattr_list() takes only one optional argument'
1935 raise ValueError,'getattr_list() takes only one optional argument'
1909 else:
1936 else:
1910 return map(lambda attr: getattr(obj,attr),alist)
1937 return map(lambda attr: getattr(obj,attr),alist)
1911
1938
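A hedged sketch of the attribute-list helpers; Bag is a throwaway class used only for illustration, and the names passed to setattr_list() are resolved in the caller's namespace as described above:

    >>> class Bag: pass
    >>> b = Bag()
    >>> x, y = 1, 2
    >>> setattr_list(b, 'x y')           # copies x and y onto b
    >>> getattr_list(b, 'x y z', None)   # missing 'z' falls back to the default
    [1, 2, None]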
1912 #----------------------------------------------------------------------------
1939 #----------------------------------------------------------------------------
1913 def map_method(method,object_list,*argseq,**kw):
1940 def map_method(method,object_list,*argseq,**kw):
1914 """map_method(method,object_list,*args,**kw) -> list
1941 """map_method(method,object_list,*args,**kw) -> list
1915
1942
1916 Return a list of the results of applying the methods to the items of the
1943 Return a list of the results of applying the methods to the items of the
1917 argument sequence(s). If more than one sequence is given, the method is
1944 argument sequence(s). If more than one sequence is given, the method is
1918 called with an argument list consisting of the corresponding item of each
1945 called with an argument list consisting of the corresponding item of each
1919 sequence. All sequences must be of the same length.
1946 sequence. All sequences must be of the same length.
1920
1947
1921 Keyword arguments are passed verbatim to all objects called.
1948 Keyword arguments are passed verbatim to all objects called.
1922
1949
1923 This is Python code, so it's not nearly as fast as the builtin map()."""
1950 This is Python code, so it's not nearly as fast as the builtin map()."""
1924
1951
1925 out_list = []
1952 out_list = []
1926 idx = 0
1953 idx = 0
1927 for object in object_list:
1954 for object in object_list:
1928 try:
1955 try:
1929 handler = getattr(object, method)
1956 handler = getattr(object, method)
1930 except AttributeError:
1957 except AttributeError:
1931 out_list.append(None)
1958 out_list.append(None)
1932 else:
1959 else:
1933 if argseq:
1960 if argseq:
1934 args = map(lambda lst:lst[idx],argseq)
1961 args = map(lambda lst:lst[idx],argseq)
1935 #print 'ob',object,'hand',handler,'ar',args # dbg
1962 #print 'ob',object,'hand',handler,'ar',args # dbg
1936 out_list.append(handler(args,**kw))
1963 out_list.append(handler(args,**kw))
1937 else:
1964 else:
1938 out_list.append(handler(**kw))
1965 out_list.append(handler(**kw))
1939 idx += 1
1966 idx += 1
1940 return out_list
1967 return out_list
1941
1968
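Illustrative use of map_method(); objects that lack the method contribute None to the result:

    >>> map_method('upper', ['ab', 'cd'])
    ['AB', 'CD']
    >>> map_method('upper', ['ab', 3])    # 3 has no .upper(), so it maps to None
    ['AB', None]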
1942 #----------------------------------------------------------------------------
1969 #----------------------------------------------------------------------------
1943 def get_class_members(cls):
1970 def get_class_members(cls):
1944 ret = dir(cls)
1971 ret = dir(cls)
1945 if hasattr(cls,'__bases__'):
1972 if hasattr(cls,'__bases__'):
1946 for base in cls.__bases__:
1973 for base in cls.__bases__:
1947 ret.extend(get_class_members(base))
1974 ret.extend(get_class_members(base))
1948 return ret
1975 return ret
1949
1976
1950 #----------------------------------------------------------------------------
1977 #----------------------------------------------------------------------------
1951 def dir2(obj):
1978 def dir2(obj):
1952 """dir2(obj) -> list of strings
1979 """dir2(obj) -> list of strings
1953
1980
1954 Extended version of the Python builtin dir(), which does a few extra
1981 Extended version of the Python builtin dir(), which does a few extra
1955 checks, and supports common objects with unusual internals that confuse
1982 checks, and supports common objects with unusual internals that confuse
1956 dir(), such as Traits and PyCrust.
1983 dir(), such as Traits and PyCrust.
1957
1984
1958 This version is guaranteed to return only a list of true strings, whereas
1985 This version is guaranteed to return only a list of true strings, whereas
1959 dir() returns anything that objects inject into themselves, even if they
1986 dir() returns anything that objects inject into themselves, even if they
1960 are later not really valid for attribute access (many extension libraries
1987 are later not really valid for attribute access (many extension libraries
1961 have such bugs).
1988 have such bugs).
1962 """
1989 """
1963
1990
1964 # Start building the attribute list via dir(), and then complete it
1991 # Start building the attribute list via dir(), and then complete it
1965 # with a few extra special-purpose calls.
1992 # with a few extra special-purpose calls.
1966 words = dir(obj)
1993 words = dir(obj)
1967
1994
1968 if hasattr(obj,'__class__'):
1995 if hasattr(obj,'__class__'):
1969 words.append('__class__')
1996 words.append('__class__')
1970 words.extend(get_class_members(obj.__class__))
1997 words.extend(get_class_members(obj.__class__))
1971 #if '__base__' in words: 1/0
1998 #if '__base__' in words: 1/0
1972
1999
1973 # Some libraries (such as traits) may introduce duplicates, we want to
2000 # Some libraries (such as traits) may introduce duplicates, we want to
1974 # track and clean this up if it happens
2001 # track and clean this up if it happens
1975 may_have_dupes = False
2002 may_have_dupes = False
1976
2003
1977 # this is the 'dir' function for objects with Enthought's traits
2004 # this is the 'dir' function for objects with Enthought's traits
1978 if hasattr(obj, 'trait_names'):
2005 if hasattr(obj, 'trait_names'):
1979 try:
2006 try:
1980 words.extend(obj.trait_names())
2007 words.extend(obj.trait_names())
1981 may_have_dupes = True
2008 may_have_dupes = True
1982 except TypeError:
2009 except TypeError:
1983 # This will happen if `obj` is a class and not an instance.
2010 # This will happen if `obj` is a class and not an instance.
1984 pass
2011 pass
1985
2012
1986 # Support for PyCrust-style _getAttributeNames magic method.
2013 # Support for PyCrust-style _getAttributeNames magic method.
1987 if hasattr(obj, '_getAttributeNames'):
2014 if hasattr(obj, '_getAttributeNames'):
1988 try:
2015 try:
1989 words.extend(obj._getAttributeNames())
2016 words.extend(obj._getAttributeNames())
1990 may_have_dupes = True
2017 may_have_dupes = True
1991 except TypeError:
2018 except TypeError:
1992 # `obj` is a class and not an instance. Ignore
2019 # `obj` is a class and not an instance. Ignore
1993 # this error.
2020 # this error.
1994 pass
2021 pass
1995
2022
1996 if may_have_dupes:
2023 if may_have_dupes:
1997 # eliminate possible duplicates, as some traits may also
2024 # eliminate possible duplicates, as some traits may also
1998 # appear as normal attributes in the dir() call.
2025 # appear as normal attributes in the dir() call.
1999 words = list(set(words))
2026 words = list(set(words))
2000 words.sort()
2027 words.sort()
2001
2028
2002 # filter out non-string attributes which may be stuffed by dir() calls
2029 # filter out non-string attributes which may be stuffed by dir() calls
2003 # and poor coding in third-party modules
2030 # and poor coding in third-party modules
2004 return [w for w in words if isinstance(w, basestring)]
2031 return [w for w in words if isinstance(w, basestring)]
2005
2032
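A quick check of dir2() against the builtin dir(); the point is that only real string names survive the filter:

    >>> attrs = dir2([])
    >>> 'append' in attrs and '__class__' in attrs
    True
    >>> [a for a in attrs if not isinstance(a, basestring)]
    []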
2006 #----------------------------------------------------------------------------
2033 #----------------------------------------------------------------------------
2007 def import_fail_info(mod_name,fns=None):
2034 def import_fail_info(mod_name,fns=None):
2008 """Inform load failure for a module."""
2035 """Inform load failure for a module."""
2009
2036
2010 if fns == None:
2037 if fns == None:
2011 warn("Loading of %s failed.\n" % (mod_name,))
2038 warn("Loading of %s failed.\n" % (mod_name,))
2012 else:
2039 else:
2013 warn("Loading of %s from %s failed.\n" % (fns,mod_name))
2040 warn("Loading of %s from %s failed.\n" % (fns,mod_name))
2014
2041
2015 #----------------------------------------------------------------------------
2042 #----------------------------------------------------------------------------
2016 # Proposed popitem() extension, written as a method
2043 # Proposed popitem() extension, written as a method
2017
2044
2018
2045
2019 class NotGiven: pass
2046 class NotGiven: pass
2020
2047
2021 def popkey(dct,key,default=NotGiven):
2048 def popkey(dct,key,default=NotGiven):
2022 """Return dct[key] and delete dct[key].
2049 """Return dct[key] and delete dct[key].
2023
2050
2024 If dct[key] doesn't exist, return default when one is given; otherwise raise
2051 If dct[key] doesn't exist, return default when one is given; otherwise raise
2025 KeyError. """
2052 KeyError. """
2026
2053
2027 try:
2054 try:
2028 val = dct[key]
2055 val = dct[key]
2029 except KeyError:
2056 except KeyError:
2030 if default is NotGiven:
2057 if default is NotGiven:
2031 raise
2058 raise
2032 else:
2059 else:
2033 return default
2060 return default
2034 else:
2061 else:
2035 del dct[key]
2062 del dct[key]
2036 return val
2063 return val
2037
2064
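popkey() in action (illustrative):

    >>> d = {'a': 1}
    >>> popkey(d, 'a')        # returns the value and removes the key
    1
    >>> d
    {}
    >>> popkey(d, 'b', default=0)
    0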
2038 def wrap_deprecated(func, suggest = '<nothing>'):
2065 def wrap_deprecated(func, suggest = '<nothing>'):
2039 def newFunc(*args, **kwargs):
2066 def newFunc(*args, **kwargs):
2040 warnings.warn("Call to deprecated function %s, use %s instead" %
2067 warnings.warn("Call to deprecated function %s, use %s instead" %
2041 ( func.__name__, suggest),
2068 ( func.__name__, suggest),
2042 category=DeprecationWarning,
2069 category=DeprecationWarning,
2043 stacklevel = 2)
2070 stacklevel = 2)
2044 return func(*args, **kwargs)
2071 return func(*args, **kwargs)
2045 return newFunc
2072 return newFunc
2046
2073
2047
2074
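A small sketch of wrap_deprecated(), reusing qw() from above purely as an example target:

    >>> old_qw = wrap_deprecated(qw, suggest='str.split')
    >>> old_qw('a b')          # emits a DeprecationWarning, then delegates to qw()
    ['a', 'b']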
2048 def _num_cpus_unix():
2075 def _num_cpus_unix():
2049 """Return the number of active CPUs on a Unix system."""
2076 """Return the number of active CPUs on a Unix system."""
2050 return os.sysconf("SC_NPROCESSORS_ONLN")
2077 return os.sysconf("SC_NPROCESSORS_ONLN")
2051
2078
2052
2079
2053 def _num_cpus_darwin():
2080 def _num_cpus_darwin():
2054 """Return the number of active CPUs on a Darwin system."""
2081 """Return the number of active CPUs on a Darwin system."""
2055 p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE)
2082 p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE)
2056 return p.stdout.read()
2083 return p.stdout.read()
2057
2084
2058
2085
2059 def _num_cpus_windows():
2086 def _num_cpus_windows():
2060 """Return the number of active CPUs on a Windows system."""
2087 """Return the number of active CPUs on a Windows system."""
2061 return os.environ.get("NUMBER_OF_PROCESSORS")
2088 return os.environ.get("NUMBER_OF_PROCESSORS")
2062
2089
2063
2090
2064 def num_cpus():
2091 def num_cpus():
2065 """Return the effective number of CPUs in the system as an integer.
2092 """Return the effective number of CPUs in the system as an integer.
2066
2093
2067 This cross-platform function makes an attempt at finding the total number of
2094 This cross-platform function makes an attempt at finding the total number of
2068 available CPUs in the system, as returned by various underlying system and
2095 available CPUs in the system, as returned by various underlying system and
2069 python calls.
2096 python calls.
2070
2097
2071 If it can't find a sensible answer, it returns 1 (though an error *may* make
2098 If it can't find a sensible answer, it returns 1 (though an error *may* make
2072 it return a large positive number that's actually incorrect).
2099 it return a large positive number that's actually incorrect).
2073 """
2100 """
2074
2101
2075 # Many thanks to the Parallel Python project (http://www.parallelpython.com)
2102 # Many thanks to the Parallel Python project (http://www.parallelpython.com)
2076 # for the names of the keys we needed to look up for this function. This
2103 # for the names of the keys we needed to look up for this function. This
2077 # code was inspired by their equivalent function.
2104 # code was inspired by their equivalent function.
2078
2105
2079 ncpufuncs = {'Linux':_num_cpus_unix,
2106 ncpufuncs = {'Linux':_num_cpus_unix,
2080 'Darwin':_num_cpus_darwin,
2107 'Darwin':_num_cpus_darwin,
2081 'Windows':_num_cpus_windows,
2108 'Windows':_num_cpus_windows,
2082 # On Vista, python < 2.5.2 has a bug and returns 'Microsoft'
2109 # On Vista, python < 2.5.2 has a bug and returns 'Microsoft'
2083 # See http://bugs.python.org/issue1082 for details.
2110 # See http://bugs.python.org/issue1082 for details.
2084 'Microsoft':_num_cpus_windows,
2111 'Microsoft':_num_cpus_windows,
2085 }
2112 }
2086
2113
2087 ncpufunc = ncpufuncs.get(platform.system(),
2114 ncpufunc = ncpufuncs.get(platform.system(),
2088 # default to unix version (Solaris, AIX, etc)
2115 # default to unix version (Solaris, AIX, etc)
2089 _num_cpus_unix)
2116 _num_cpus_unix)
2090
2117
2091 try:
2118 try:
2092 ncpus = max(1,int(ncpufunc()))
2119 ncpus = max(1,int(ncpufunc()))
2093 except:
2120 except:
2094 ncpus = 1
2121 ncpus = 1
2095 return ncpus
2122 return ncpus
2096
2123
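num_cpus() is meant to be safe to call unconditionally; a minimal check:

    >>> num_cpus() >= 1        # always an int >= 1, even if detection fails
    True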
2097 #*************************** end of file <genutils.py> **********************
2124 #*************************** end of file <genutils.py> **********************
@@ -1,41 +1,41 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Asynchronous clients for the IPython controller.
3 """Asynchronous clients for the IPython controller.
4
4
5 This module has clients for using the various interfaces of the controller
5 This module has clients for using the various interfaces of the controller
6 in a fully asynchronous manner. This means that you will need to run the
6 in a fully asynchronous manner. This means that you will need to run the
7 Twisted reactor yourself and that all methods of the client classes return
7 Twisted reactor yourself and that all methods of the client classes return
8 deferreds to the result.
8 deferreds to the result.
9
9
10 The main methods are `get_*_client` and `get_client`.
10 The main methods are `get_*_client` and `get_client`.
11 """
11 """
12
12
13 __docformat__ = "restructuredtext en"
13 __docformat__ = "restructuredtext en"
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Copyright (C) 2008 The IPython Development Team
16 # Copyright (C) 2008 The IPython Development Team
17 #
17 #
18 # Distributed under the terms of the BSD License. The full license is in
18 # Distributed under the terms of the BSD License. The full license is in
19 # the file COPYING, distributed as part of this software.
19 # the file COPYING, distributed as part of this software.
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 #-------------------------------------------------------------------------------
22 #-------------------------------------------------------------------------------
23 # Imports
23 # Imports
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 from IPython.kernel import codeutil
26 from IPython.kernel import codeutil
27 from IPython.kernel.clientconnector import ClientConnector
27 from IPython.kernel.clientconnector import ClientConnector
28
28
29 # Other things that the user will need
29 # Other things that the user will need
30 from IPython.kernel.task import Task
30 from IPython.kernel.task import MapTask, StringTask
31 from IPython.kernel.error import CompositeError
31 from IPython.kernel.error import CompositeError
32
32
33 #-------------------------------------------------------------------------------
33 #-------------------------------------------------------------------------------
34 # Code
34 # Code
35 #-------------------------------------------------------------------------------
35 #-------------------------------------------------------------------------------
36
36
37 _client_tub = ClientConnector()
37 _client_tub = ClientConnector()
38 get_multiengine_client = _client_tub.get_multiengine_client
38 get_multiengine_client = _client_tub.get_multiengine_client
39 get_task_client = _client_tub.get_task_client
39 get_task_client = _client_tub.get_task_client
40 get_client = _client_tub.get_client
40 get_client = _client_tub.get_client
41
41
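As the docstring notes, every client method here hands back a Twisted deferred and the caller drives the reactor. A minimal sketch of that pattern, assuming a controller is already running with its FURL file in the default location and that this module is importable as `IPython.kernel.asyncclient`:

    from twisted.internet import reactor
    from IPython.kernel import asyncclient

    def connected(mec):
        # The returned multiengine client's own methods also return deferreds.
        print "connected to controller:", mec
        reactor.stop()

    d = asyncclient.get_multiengine_client()   # deferred to the client
    d.addCallback(connected)
    reactor.run()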
@@ -1,96 +1,96 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """This module contains blocking clients for the controller interfaces.
3 """This module contains blocking clients for the controller interfaces.
4
4
5 Unlike the clients in `asyncclient.py`, the clients in this module are fully
5 Unlike the clients in `asyncclient.py`, the clients in this module are fully
6 blocking. This means that methods on the clients return the actual results
6 blocking. This means that methods on the clients return the actual results
7 rather than a deferred to the result. Also, we manage the Twisted reactor
7 rather than a deferred to the result. Also, we manage the Twisted reactor
8 for you. This is done by running the reactor in a thread.
8 for you. This is done by running the reactor in a thread.
9
9
10 The main classes in this module are:
10 The main classes in this module are:
11
11
12 * MultiEngineClient
12 * MultiEngineClient
13 * TaskClient
13 * TaskClient
14 * Task
14 * Task
15 * CompositeError
15 * CompositeError
16 """
16 """
17
17
18 __docformat__ = "restructuredtext en"
18 __docformat__ = "restructuredtext en"
19
19
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21 # Copyright (C) 2008 The IPython Development Team
21 # Copyright (C) 2008 The IPython Development Team
22 #
22 #
23 # Distributed under the terms of the BSD License. The full license is in
23 # Distributed under the terms of the BSD License. The full license is in
24 # the file COPYING, distributed as part of this software.
24 # the file COPYING, distributed as part of this software.
25 #-------------------------------------------------------------------------------
25 #-------------------------------------------------------------------------------
26
26
27 #-------------------------------------------------------------------------------
27 #-------------------------------------------------------------------------------
28 # Imports
28 # Imports
29 #-------------------------------------------------------------------------------
29 #-------------------------------------------------------------------------------
30
30
31 import sys
31 import sys
32
32
33 # from IPython.tools import growl
33 # from IPython.tools import growl
34 # growl.start("IPython1 Client")
34 # growl.start("IPython1 Client")
35
35
36
36
37 from twisted.internet import reactor
37 from twisted.internet import reactor
38 from IPython.kernel.clientconnector import ClientConnector
38 from IPython.kernel.clientconnector import ClientConnector
39 from IPython.kernel.twistedutil import ReactorInThread
39 from IPython.kernel.twistedutil import ReactorInThread
40 from IPython.kernel.twistedutil import blockingCallFromThread
40 from IPython.kernel.twistedutil import blockingCallFromThread
41
41
42 # These enable various things
42 # These enable various things
43 from IPython.kernel import codeutil
43 from IPython.kernel import codeutil
44 import IPython.kernel.magic
44 import IPython.kernel.magic
45
45
46 # Other things that the user will need
46 # Other things that the user will need
47 from IPython.kernel.task import Task
47 from IPython.kernel.task import MapTask, StringTask
48 from IPython.kernel.error import CompositeError
48 from IPython.kernel.error import CompositeError
49
49
50 #-------------------------------------------------------------------------------
50 #-------------------------------------------------------------------------------
51 # Code
51 # Code
52 #-------------------------------------------------------------------------------
52 #-------------------------------------------------------------------------------
53
53
54 _client_tub = ClientConnector()
54 _client_tub = ClientConnector()
55
55
56
56
57 def get_multiengine_client(furl_or_file=''):
57 def get_multiengine_client(furl_or_file=''):
58 """Get the blocking MultiEngine client.
58 """Get the blocking MultiEngine client.
59
59
60 :Parameters:
60 :Parameters:
61 furl_or_file : str
61 furl_or_file : str
62 A furl or a filename containing a furl. If empty, the
62 A furl or a filename containing a furl. If empty, the
63 default furl_file will be used
63 default furl_file will be used
64
64
65 :Returns:
65 :Returns:
66 The connected MultiEngineClient instance
66 The connected MultiEngineClient instance
67 """
67 """
68 client = blockingCallFromThread(_client_tub.get_multiengine_client,
68 client = blockingCallFromThread(_client_tub.get_multiengine_client,
69 furl_or_file)
69 furl_or_file)
70 return client.adapt_to_blocking_client()
70 return client.adapt_to_blocking_client()
71
71
72 def get_task_client(furl_or_file=''):
72 def get_task_client(furl_or_file=''):
73 """Get the blocking Task client.
73 """Get the blocking Task client.
74
74
75 :Parameters:
75 :Parameters:
76 furl_or_file : str
76 furl_or_file : str
77 A furl or a filename containing a furl. If empty, the
77 A furl or a filename containing a furl. If empty, the
78 default furl_file will be used
78 default furl_file will be used
79
79
80 :Returns:
80 :Returns:
81 The connected TaskClient instance
81 The connected TaskClient instance
82 """
82 """
83 client = blockingCallFromThread(_client_tub.get_task_client,
83 client = blockingCallFromThread(_client_tub.get_task_client,
84 furl_or_file)
84 furl_or_file)
85 return client.adapt_to_blocking_client()
85 return client.adapt_to_blocking_client()
86
86
87
87
88 MultiEngineClient = get_multiengine_client
88 MultiEngineClient = get_multiengine_client
89 TaskClient = get_task_client
89 TaskClient = get_task_client
90
90
91
91
92
92
93 # Now we start the reactor in a thread
93 # Now we start the reactor in a thread
94 rit = ReactorInThread()
94 rit = ReactorInThread()
95 rit.setDaemon(True)
95 rit.setDaemon(True)
96 rit.start() No newline at end of file
96 rit.start()
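Because the reactor is already running in a background thread here, the blocking clients behave like ordinary Python objects. A minimal usage sketch, assuming an `ipcontroller` (plus engines) is already running with its FURL files in the default location, and that this module is imported as `IPython.kernel.client` (the same import used by `contexts.py` below):

    from IPython.kernel import client

    mec = client.MultiEngineClient()   # connects via the default furl_file
    print mec.get_ids()                # e.g. [0, 1]
    mec.execute('a = 5')               # runs synchronously on all engines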
@@ -1,178 +1,180 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_contexts -*-
2 # -*- test-case-name: IPython.kernel.test.test_contexts -*-
3 """Context managers for IPython.
3 """Context managers for IPython.
4
4
5 Python 2.5 introduced the `with` statement, which is based on the context
5 Python 2.5 introduced the `with` statement, which is based on the context
6 manager protocol. This module offers a few context managers for common cases,
6 manager protocol. This module offers a few context managers for common cases,
7 which can also be useful as templates for writing new, application-specific
7 which can also be useful as templates for writing new, application-specific
8 managers.
8 managers.
9 """
9 """
10
10
11 from __future__ import with_statement
11 from __future__ import with_statement
12
12
13 __docformat__ = "restructuredtext en"
13 __docformat__ = "restructuredtext en"
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Copyright (C) 2008 The IPython Development Team
16 # Copyright (C) 2008 The IPython Development Team
17 #
17 #
18 # Distributed under the terms of the BSD License. The full license is in
18 # Distributed under the terms of the BSD License. The full license is in
19 # the file COPYING, distributed as part of this software.
19 # the file COPYING, distributed as part of this software.
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 #-------------------------------------------------------------------------------
22 #-------------------------------------------------------------------------------
23 # Imports
23 # Imports
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 import linecache
26 import linecache
27 import sys
27 import sys
28
28
29 from twisted.internet.error import ConnectionRefusedError
29 from twisted.internet.error import ConnectionRefusedError
30
30
31 from IPython.ultraTB import _fixed_getinnerframes, findsource
31 from IPython.ultraTB import _fixed_getinnerframes, findsource
32 from IPython import ipapi
32 from IPython import ipapi
33
33
34 from IPython.kernel import error
34 from IPython.kernel import error
35
35
36 #---------------------------------------------------------------------------
36 #---------------------------------------------------------------------------
37 # Utility functions needed by all context managers.
37 # Utility functions needed by all context managers.
38 #---------------------------------------------------------------------------
38 #---------------------------------------------------------------------------
39
39
40 def remote():
40 def remote():
41 """Raises a special exception meant to be caught by context managers.
41 """Raises a special exception meant to be caught by context managers.
42 """
42 """
43 m = 'Special exception to stop local execution of parallel code.'
43 m = 'Special exception to stop local execution of parallel code.'
44 raise error.StopLocalExecution(m)
44 raise error.StopLocalExecution(m)
45
45
46
46
47 def strip_whitespace(source,require_remote=True):
47 def strip_whitespace(source,require_remote=True):
48 """strip leading whitespace from input source.
48 """strip leading whitespace from input source.
49
49
50 :Parameters:
50 :Parameters:
51
51
52 """
52 """
53 remote_mark = 'remote()'
53 remote_mark = 'remote()'
54 # Expand tabs to avoid any confusion.
54 # Expand tabs to avoid any confusion.
55 wsource = [l.expandtabs(4) for l in source]
55 wsource = [l.expandtabs(4) for l in source]
56 # Detect the indentation level
56 # Detect the indentation level
57 done = False
57 done = False
58 for line in wsource:
58 for line in wsource:
59 if line.isspace():
59 if line.isspace():
60 continue
60 continue
61 for col,char in enumerate(line):
61 for col,char in enumerate(line):
62 if char != ' ':
62 if char != ' ':
63 done = True
63 done = True
64 break
64 break
65 if done:
65 if done:
66 break
66 break
67 # Now we know how much leading space there is in the code. Next, we
67 # Now we know how much leading space there is in the code. Next, we
68 # extract up to the first line that has less indentation.
68 # extract up to the first line that has less indentation.
69 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
69 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
70 # detect triple quoted strings that may have flush left text.
70 # detect triple quoted strings that may have flush left text.
71 for lno,line in enumerate(wsource):
71 for lno,line in enumerate(wsource):
72 lead = line[:col]
72 lead = line[:col]
73 if lead.isspace():
73 if lead.isspace():
74 continue
74 continue
75 else:
75 else:
76 if not lead.lstrip().startswith('#'):
76 if not lead.lstrip().startswith('#'):
77 break
77 break
78 # The real 'with' source is up to lno
78 # The real 'with' source is up to lno
79 src_lines = [l[col:] for l in wsource[:lno+1]]
79 src_lines = [l[col:] for l in wsource[:lno+1]]
80
80
81 # Finally, check that the source's first non-comment line begins with the
81 # Finally, check that the source's first non-comment line begins with the
82 # special call 'remote()'
82 # special call 'remote()'
83 if require_remote:
83 if require_remote:
84 for nline,line in enumerate(src_lines):
84 for nline,line in enumerate(src_lines):
85 if line.isspace() or line.startswith('#'):
85 if line.isspace() or line.startswith('#'):
86 continue
86 continue
87 if line.startswith(remote_mark):
87 if line.startswith(remote_mark):
88 break
88 break
89 else:
89 else:
90 raise ValueError('%s call missing at the start of code' %
90 raise ValueError('%s call missing at the start of code' %
91 remote_mark)
91 remote_mark)
92 out_lines = src_lines[nline+1:]
92 out_lines = src_lines[nline+1:]
93 else:
93 else:
94 # If the user specified that the remote() call wasn't mandatory
94 # If the user specified that the remote() call wasn't mandatory
95 out_lines = src_lines
95 out_lines = src_lines
96
96
97 # src = ''.join(out_lines) # dbg
97 # src = ''.join(out_lines) # dbg
98 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
98 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
99 return ''.join(out_lines)
99 return ''.join(out_lines)
100
100
101 class RemoteContextBase(object):
101 class RemoteContextBase(object):
102 def __init__(self):
102 def __init__(self):
103 self.ip = ipapi.get()
103 self.ip = ipapi.get()
104
104
105 def _findsource_file(self,f):
105 def _findsource_file(self,f):
106 linecache.checkcache()
106 linecache.checkcache()
107 s = findsource(f.f_code)
107 s = findsource(f.f_code)
108 lnum = f.f_lineno
108 lnum = f.f_lineno
109 wsource = s[0][f.f_lineno:]
109 wsource = s[0][f.f_lineno:]
110 return strip_whitespace(wsource)
110 return strip_whitespace(wsource)
111
111
112 def _findsource_ipython(self,f):
112 def _findsource_ipython(self,f):
113 from IPython import ipapi
113 from IPython import ipapi
114 self.ip = ipapi.get()
114 self.ip = ipapi.get()
115 buf = self.ip.IP.input_hist_raw[-1].splitlines()[1:]
115 buf = self.ip.IP.input_hist_raw[-1].splitlines()[1:]
116 wsource = [l+'\n' for l in buf ]
116 wsource = [l+'\n' for l in buf ]
117
117
118 return strip_whitespace(wsource)
118 return strip_whitespace(wsource)
119
119
120 def findsource(self,frame):
120 def findsource(self,frame):
121 local_ns = frame.f_locals
121 local_ns = frame.f_locals
122 global_ns = frame.f_globals
122 global_ns = frame.f_globals
123 if frame.f_code.co_filename == '<ipython console>':
123 if frame.f_code.co_filename == '<ipython console>':
124 src = self._findsource_ipython(frame)
124 src = self._findsource_ipython(frame)
125 else:
125 else:
126 src = self._findsource_file(frame)
126 src = self._findsource_file(frame)
127 return src
127 return src
128
128
129 def __enter__(self):
129 def __enter__(self):
130 raise NotImplementedError
130 raise NotImplementedError
131
131
132 def __exit__ (self, etype, value, tb):
132 def __exit__ (self, etype, value, tb):
133 if issubclass(etype,error.StopLocalExecution):
133 if issubclass(etype,error.StopLocalExecution):
134 return True
134 return True
135
135
136 class RemoteMultiEngine(RemoteContextBase):
136 class RemoteMultiEngine(RemoteContextBase):
137 def __init__(self,mec):
137 def __init__(self,mec):
138 self.mec = mec
138 self.mec = mec
139 RemoteContextBase.__init__(self)
139 RemoteContextBase.__init__(self)
140
140
141 def __enter__(self):
141 def __enter__(self):
142 src = self.findsource(sys._getframe(1))
142 src = self.findsource(sys._getframe(1))
143 return self.mec.execute(src)
143 return self.mec.execute(src)
144
144
145
145
146 # XXX - Temporary hackish testing, we'll move this into proper tests right
146 # XXX - Temporary hackish testing, we'll move this into proper tests right
147 # away
147 # away. This has been commented out as it doesn't run under Python 2.4
148
148 # because of the usage of the with statement below. We need to protect
149 if __name__ == '__main__':
149 # such things with a try:except.
150
150
151 # XXX - for now, we need a running cluster to be started separately. The
151 # if __name__ == '__main__':
152 # daemon work is almost finished, and will make much of this unnecessary.
152 #
153 from IPython.kernel import client
153 # # XXX - for now, we need a running cluster to be started separately. The
154 mec = client.MultiEngineClient(('127.0.0.1',10105))
154 # # daemon work is almost finished, and will make much of this unnecessary.
155
155 # from IPython.kernel import client
156 try:
156 # mec = client.MultiEngineClient(('127.0.0.1',10105))
157 mec.get_ids()
157 #
158 except ConnectionRefusedError:
158 # try:
159 import os, time
159 # mec.get_ids()
160 os.system('ipcluster -n 2 &')
160 # except ConnectionRefusedError:
161 time.sleep(2)
161 # import os, time
162 mec = client.MultiEngineClient(('127.0.0.1',10105))
162 # os.system('ipcluster -n 2 &')
163
163 # time.sleep(2)
164 mec.block = False
164 # mec = client.MultiEngineClient(('127.0.0.1',10105))
165
165 #
166 import itertools
166 # mec.block = False
167 c = itertools.count()
167 #
168
168 # import itertools
169 parallel = RemoteMultiEngine(mec)
169 # c = itertools.count()
170
170 #
171 with parallel as pr:
171 # parallel = RemoteMultiEngine(mec)
172 # A comment
172 #
173 remote() # this means the code below only runs remotely
173 # with parallel as pr:
174 print 'Hello remote world'
174 # # A comment
175 x = 3.14
175 # remote() # this means the code below only runs remotely
176 # Comments are OK
176 # print 'Hello remote world'
177 # Even misindented.
177 # x = 3.14
178 y = x+1
178 # # Comments are OK
179 # # Even misindented.
180 # y = x+1
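For completeness, the pattern the commented-out block above exercises looks roughly like the sketch below when it can run. It needs Python 2.5+ for the `with` statement, a running controller with engines, and it assumes this module is importable as `IPython.kernel.contexts`:

    from __future__ import with_statement
    from IPython.kernel import client
    from IPython.kernel.contexts import RemoteMultiEngine, remote

    mec = client.MultiEngineClient()
    with RemoteMultiEngine(mec):
        remote()        # marks the start of code that only runs on the engines
        x = 3.14
        y = x + 1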
@@ -1,171 +1,171 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Magic command interface for interactive parallel work."""
3 """Magic command interface for interactive parallel work."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import new
18 import new
19
19
20 from IPython.iplib import InteractiveShell
20 from IPython.iplib import InteractiveShell
21 from IPython.Shell import MTInteractiveShell
21 from IPython.Shell import MTInteractiveShell
22
22
23 from twisted.internet.defer import Deferred
23 from twisted.internet.defer import Deferred
24
24
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Definitions of magic functions for use with IPython
27 # Definitions of magic functions for use with IPython
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 NO_ACTIVE_CONTROLLER = """
30 NO_ACTIVE_CONTROLLER = """
31 Error: No Controller is activated
31 Error: No Controller is activated
32 Use activate() on a RemoteController object to activate it for magics.
32 Use activate() on a RemoteController object to activate it for magics.
33 """
33 """
34
34
35 def magic_result(self,parameter_s=''):
35 def magic_result(self,parameter_s=''):
36 """Print the result of command i on all engines of the active controller.
36 """Print the result of command i on all engines of the active controller.
37
37
38 To activate a controller in IPython, first create it and then call
38 To activate a controller in IPython, first create it and then call
39 the activate() method.
39 the activate() method.
40
40
41 Then you can do the following:
41 Then you can do the following:
42
42
43 >>> result # Print the latest result
43 >>> result # Print the latest result
44 Printing result...
44 Printing result...
45 [127.0.0.1:0] In [1]: b = 10
45 [127.0.0.1:0] In [1]: b = 10
46 [127.0.0.1:1] In [1]: b = 10
46 [127.0.0.1:1] In [1]: b = 10
47
47
48 >>> result 0 # Print result 0
48 >>> result 0 # Print result 0
49 In [14]: result 0
49 In [14]: result 0
50 Printing result...
50 Printing result...
51 [127.0.0.1:0] In [0]: a = 5
51 [127.0.0.1:0] In [0]: a = 5
52 [127.0.0.1:1] In [0]: a = 5
52 [127.0.0.1:1] In [0]: a = 5
53 """
53 """
54 try:
54 try:
55 activeController = __IPYTHON__.activeController
55 activeController = __IPYTHON__.activeController
56 except AttributeError:
56 except AttributeError:
57 print NO_ACTIVE_CONTROLLER
57 print NO_ACTIVE_CONTROLLER
58 else:
58 else:
59 try:
59 try:
60 index = int(parameter_s)
60 index = int(parameter_s)
61 except:
61 except:
62 index = None
62 index = None
63 result = activeController.get_result(index)
63 result = activeController.get_result(index)
64 return result
64 return result
65
65
66 def magic_px(self,parameter_s=''):
66 def magic_px(self,parameter_s=''):
67 """Executes the given python command on the active IPython Controller.
67 """Executes the given python command on the active IPython Controller.
68
68
69 To activate a Controller in IPython, first create it and then call
69 To activate a Controller in IPython, first create it and then call
70 the activate() method.
70 the activate() method.
71
71
72 Then you can do the following:
72 Then you can do the following:
73
73
74 >>> %px a = 5 # Runs a = 5 on all nodes
74 >>> %px a = 5 # Runs a = 5 on all nodes
75 """
75 """
76
76
77 try:
77 try:
78 activeController = __IPYTHON__.activeController
78 activeController = __IPYTHON__.activeController
79 except AttributeError:
79 except AttributeError:
80 print NO_ACTIVE_CONTROLLER
80 print NO_ACTIVE_CONTROLLER
81 else:
81 else:
82 print "Executing command on Controller"
82 print "Parallel execution on engines: %s" % activeController.targets
83 result = activeController.execute(parameter_s)
83 result = activeController.execute(parameter_s)
84 return result
84 return result
85
85
86 def pxrunsource(self, source, filename="<input>", symbol="single"):
86 def pxrunsource(self, source, filename="<input>", symbol="single"):
87
87
88 try:
88 try:
89 code = self.compile(source, filename, symbol)
89 code = self.compile(source, filename, symbol)
90 except (OverflowError, SyntaxError, ValueError):
90 except (OverflowError, SyntaxError, ValueError):
91 # Case 1
91 # Case 1
92 self.showsyntaxerror(filename)
92 self.showsyntaxerror(filename)
93 return None
93 return None
94
94
95 if code is None:
95 if code is None:
96 # Case 2
96 # Case 2
97 return True
97 return True
98
98
99 # Case 3
99 # Case 3
100 # Because autopx is enabled, we either execute the source on all engines,
100 # Because autopx is enabled, we either execute the source on all engines,
101 # or disable autopx if %autopx or autopx has been called
101 # or disable autopx if %autopx or autopx has been called
102 if '_ip.magic("%autopx' in source or '_ip.magic("autopx' in source:
102 if '_ip.magic("%autopx' in source or '_ip.magic("autopx' in source:
103 _disable_autopx(self)
103 _disable_autopx(self)
104 return False
104 return False
105 else:
105 else:
106 try:
106 try:
107 result = self.activeController.execute(source)
107 result = self.activeController.execute(source)
108 except:
108 except:
109 self.showtraceback()
109 self.showtraceback()
110 else:
110 else:
111 print result.__repr__()
111 print result.__repr__()
112 return False
112 return False
113
113
114 def magic_autopx(self, parameter_s=''):
114 def magic_autopx(self, parameter_s=''):
115 """Toggles auto parallel mode for the active IPython Controller.
115 """Toggles auto parallel mode for the active IPython Controller.
116
116
117 To activate a Controller in IPython, first create it and then call
117 To activate a Controller in IPython, first create it and then call
118 the activate() method.
118 the activate() method.
119
119
120 Then you can do the following:
120 Then you can do the following:
121
121
122 >>> %autopx # Now all commands are executed in parallel
122 >>> %autopx # Now all commands are executed in parallel
123 Auto Parallel Enabled
123 Auto Parallel Enabled
124 Type %autopx to disable
124 Type %autopx to disable
125 ...
125 ...
126 >>> %autopx # Now all commands are locally executed
126 >>> %autopx # Now all commands are locally executed
127 Auto Parallel Disabled
127 Auto Parallel Disabled
128 """
128 """
129
129
130 if hasattr(self, 'autopx'):
130 if hasattr(self, 'autopx'):
131 if self.autopx == True:
131 if self.autopx == True:
132 _disable_autopx(self)
132 _disable_autopx(self)
133 else:
133 else:
134 _enable_autopx(self)
134 _enable_autopx(self)
135 else:
135 else:
136 _enable_autopx(self)
136 _enable_autopx(self)
137
137
138 def _enable_autopx(self):
138 def _enable_autopx(self):
139 """Enable %autopx mode by saving the original runsource and installing
139 """Enable %autopx mode by saving the original runsource and installing
140 pxrunsource.
140 pxrunsource.
141 """
141 """
142 try:
142 try:
143 activeController = __IPYTHON__.activeController
143 activeController = __IPYTHON__.activeController
144 except AttributeError:
144 except AttributeError:
145 print "No active RemoteController found, use RemoteController.activate()."
145 print "No active RemoteController found, use RemoteController.activate()."
146 else:
146 else:
147 self._original_runsource = self.runsource
147 self._original_runsource = self.runsource
148 self.runsource = new.instancemethod(pxrunsource, self, self.__class__)
148 self.runsource = new.instancemethod(pxrunsource, self, self.__class__)
149 self.autopx = True
149 self.autopx = True
150 print "Auto Parallel Enabled\nType %autopx to disable"
150 print "Auto Parallel Enabled\nType %autopx to disable"
151
151
152 def _disable_autopx(self):
152 def _disable_autopx(self):
153 """Disable %autopx by restoring the original runsource."""
153 """Disable %autopx by restoring the original runsource."""
154 if hasattr(self, 'autopx'):
154 if hasattr(self, 'autopx'):
155 if self.autopx == True:
155 if self.autopx == True:
156 self.runsource = self._original_runsource
156 self.runsource = self._original_runsource
157 self.autopx = False
157 self.autopx = False
158 print "Auto Parallel Disabled"
158 print "Auto Parallel Disabled"
159
159
160 # Add the new magic function to the class dict:
160 # Add the new magic function to the class dict:
161
161
162 InteractiveShell.magic_result = magic_result
162 InteractiveShell.magic_result = magic_result
163 InteractiveShell.magic_px = magic_px
163 InteractiveShell.magic_px = magic_px
164 InteractiveShell.magic_autopx = magic_autopx
164 InteractiveShell.magic_autopx = magic_autopx
165
165
166 # And remove the global name to keep the global namespace clean. Don't worry, the
166 # And remove the global name to keep the global namespace clean. Don't worry, the
167 # copy bound to IPython stays, we're just removing the global name.
167 # copy bound to IPython stays, we're just removing the global name.
168 del magic_result
168 del magic_result
169 del magic_px
169 del magic_px
170 del magic_autopx
170 del magic_autopx
171
171
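Putting the magics together, an interactive session might look like the sketch below (output lines are indicative only; it assumes a blocking `MultiEngineClient` from `IPython.kernel.client` whose `activate()` method registers it as the active controller, as the docstrings above describe):

    In [1]: from IPython.kernel import client
    In [2]: mec = client.MultiEngineClient()
    In [3]: mec.activate()
    In [4]: %px a = 5
    Parallel execution on engines: all
    In [5]: %autopx
    Auto Parallel Enabled
    Type %autopx to disable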
@@ -1,121 +1,121 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Classes used in scattering and gathering sequences.
3 """Classes used in scattering and gathering sequences.
4
4
5 Scattering consists of partitioning a sequence and sending the various
5 Scattering consists of partitioning a sequence and sending the various
6 pieces to individual nodes in a cluster.
6 pieces to individual nodes in a cluster.
7 """
7 """
8
8
9 __docformat__ = "restructuredtext en"
9 __docformat__ = "restructuredtext en"
10
10
11 #-------------------------------------------------------------------------------
11 #-------------------------------------------------------------------------------
12 # Copyright (C) 2008 The IPython Development Team
12 # Copyright (C) 2008 The IPython Development Team
13 #
13 #
14 # Distributed under the terms of the BSD License. The full license is in
14 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
15 # the file COPYING, distributed as part of this software.
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 #-------------------------------------------------------------------------------
18 #-------------------------------------------------------------------------------
19 # Imports
19 # Imports
20 #-------------------------------------------------------------------------------
20 #-------------------------------------------------------------------------------
21
21
22 import types
22 import types
23
23
24 from IPython.genutils import flatten as genutil_flatten
24 from IPython.genutils import flatten as genutil_flatten
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Figure out which array packages are present and their array types
27 # Figure out which array packages are present and their array types
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 arrayModules = []
30 arrayModules = []
31 try:
31 try:
32 import Numeric
32 import Numeric
33 except ImportError:
33 except ImportError:
34 pass
34 pass
35 else:
35 else:
36 arrayModules.append({'module':Numeric, 'type':Numeric.arraytype})
36 arrayModules.append({'module':Numeric, 'type':Numeric.arraytype})
37 try:
37 try:
38 import numpy
38 import numpy
39 except ImportError:
39 except ImportError:
40 pass
40 pass
41 else:
41 else:
42 arrayModules.append({'module':numpy, 'type':numpy.ndarray})
42 arrayModules.append({'module':numpy, 'type':numpy.ndarray})
43 try:
43 try:
44 import numarray
44 import numarray
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47 else:
47 else:
48 arrayModules.append({'module':numarray,
48 arrayModules.append({'module':numarray,
49 'type':numarray.numarraycore.NumArray})
49 'type':numarray.numarraycore.NumArray})
50
50
51 class Map:
51 class Map:
52 """A class for partitioning a sequence using a map."""
52 """A class for partitioning a sequence using a map."""
53
53
54 def getPartition(self, seq, p, q):
54 def getPartition(self, seq, p, q):
55 """Returns the pth partition of q partitions of seq."""
55 """Returns the pth partition of q partitions of seq."""
56
56
57 # Test for error conditions here
57 # Test for error conditions here
58 if p<0 or p>=q:
58 if p<0 or p>=q:
59 print "No partition exists."
59 print "No partition exists."
60 return
60 return
61
61
62 remainder = len(seq)%q
62 remainder = len(seq)%q
63 basesize = len(seq)/q
63 basesize = len(seq)/q
64 hi = []
64 hi = []
65 lo = []
65 lo = []
66 for n in range(q):
66 for n in range(q):
67 if n < remainder:
67 if n < remainder:
68 lo.append(n * (basesize + 1))
68 lo.append(n * (basesize + 1))
69 hi.append(lo[-1] + basesize + 1)
69 hi.append(lo[-1] + basesize + 1)
70 else:
70 else:
71 lo.append(n*basesize + remainder)
71 lo.append(n*basesize + remainder)
72 hi.append(lo[-1] + basesize)
72 hi.append(lo[-1] + basesize)
73
73
74
74
75 result = seq[lo[p]:hi[p]]
75 result = seq[lo[p]:hi[p]]
76 return result
76 return result
77
77
78 def joinPartitions(self, listOfPartitions):
78 def joinPartitions(self, listOfPartitions):
79 return self.concatenate(listOfPartitions)
79 return self.concatenate(listOfPartitions)
80
80
81 def concatenate(self, listOfPartitions):
81 def concatenate(self, listOfPartitions):
82 testObject = listOfPartitions[0]
82 testObject = listOfPartitions[0]
83 # First see if we have a known array type
83 # First see if we have a known array type
84 for m in arrayModules:
84 for m in arrayModules:
85 #print m
85 #print m
86 if isinstance(testObject, m['type']):
86 if isinstance(testObject, m['type']):
87 return m['module'].concatenate(listOfPartitions)
87 return m['module'].concatenate(listOfPartitions)
88 # Next try for Python sequence types
88 # Next try for Python sequence types
89 if isinstance(testObject, (types.ListType, types.TupleType)):
89 if isinstance(testObject, (types.ListType, types.TupleType)):
90 return genutil_flatten(listOfPartitions)
90 return genutil_flatten(listOfPartitions)
91 # If we have scalars, just return listOfPartitions
91 # If we have scalars, just return listOfPartitions
92 return listOfPartitions
92 return listOfPartitions
93
93
94 class RoundRobinMap(Map):
94 class RoundRobinMap(Map):
95 """Partitions a sequence in a roun robin fashion.
95 """Partitions a sequence in a roun robin fashion.
96
96
97 This currently does not work!
97 This currently does not work!
98 """
98 """
99
99
100 def getPartition(self, seq, p, q):
100 def getPartition(self, seq, p, q):
101 return seq[p:len(seq):q]
101 return seq[p:len(seq):q]
102 #result = []
102 #result = []
103 #for i in range(p,len(seq),q):
103 #for i in range(p,len(seq),q):
104 # result.append(seq[i])
104 # result.append(seq[i])
105 #return result
105 #return result
106
106
107 def joinPartitions(self, listOfPartitions):
107 def joinPartitions(self, listOfPartitions):
108 #lengths = [len(x) for x in listOfPartitions]
108 #lengths = [len(x) for x in listOfPartitions]
109 #maxPartitionLength = len(listOfPartitions[0])
109 #maxPartitionLength = len(listOfPartitions[0])
110 #numberOfPartitions = len(listOfPartitions)
110 #numberOfPartitions = len(listOfPartitions)
111 #concat = self.concatenate(listOfPartitions)
111 #concat = self.concatenate(listOfPartitions)
112 #totalLength = len(concat)
112 #totalLength = len(concat)
113 #result = []
113 #result = []
114 #for i in range(maxPartitionLength):
114 #for i in range(maxPartitionLength):
115 # result.append(concat[i:totalLength:maxPartitionLength])
115 # result.append(concat[i:totalLength:maxPartitionLength])
116 return self.concatenate(listOfPartitions)
116 return self.concatenate(listOfPartitions)
117
117
118 styles = {'basic':Map}
118 dists = {'b':Map}
119
119
120
120
121
121
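A small sketch of what the block `Map` above actually produces, using the `IPython.kernel.map` import path that `multiengine.py` below relies on:

    from IPython.kernel.map import Map

    m = Map()
    seq = range(10)
    parts = [m.getPartition(seq, p, 3) for p in range(3)]
    # parts == [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
    # (the remainder elements go to the earliest partitions)
    print m.joinPartitions(parts)    # back to [0, 1, ..., 9]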
@@ -1,780 +1,753 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_multiengine -*-
2 # -*- test-case-name: IPython.kernel.test.test_multiengine -*-
3
3
4 """Adapt the IPython ControllerServer to IMultiEngine.
4 """Adapt the IPython ControllerServer to IMultiEngine.
5
5
6 This module provides classes that adapt a ControllerService to the
6 This module provides classes that adapt a ControllerService to the
7 IMultiEngine interface. This interface is a basic interactive interface
7 IMultiEngine interface. This interface is a basic interactive interface
8 for working with a set of engines where it is desired to have explicit
8 for working with a set of engines where it is desired to have explicit
9 access to each registered engine.
9 access to each registered engine.
10
10
11 The classes here are exposed to the network in files like:
11 The classes here are exposed to the network in files like:
12
12
13 * multienginevanilla.py
13 * multienginevanilla.py
14 * multienginepb.py
14 * multienginepb.py
15 """
15 """
16
16
17 __docformat__ = "restructuredtext en"
17 __docformat__ = "restructuredtext en"
18
18
19 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
20 # Copyright (C) 2008 The IPython Development Team
20 # Copyright (C) 2008 The IPython Development Team
21 #
21 #
22 # Distributed under the terms of the BSD License. The full license is in
22 # Distributed under the terms of the BSD License. The full license is in
23 # the file COPYING, distributed as part of this software.
23 # the file COPYING, distributed as part of this software.
24 #-------------------------------------------------------------------------------
24 #-------------------------------------------------------------------------------
25
25
26 #-------------------------------------------------------------------------------
26 #-------------------------------------------------------------------------------
27 # Imports
27 # Imports
28 #-------------------------------------------------------------------------------
28 #-------------------------------------------------------------------------------
29
29
30 from new import instancemethod
30 from new import instancemethod
31 from types import FunctionType
31 from types import FunctionType
32
32
33 from twisted.application import service
33 from twisted.application import service
34 from twisted.internet import defer, reactor
34 from twisted.internet import defer, reactor
35 from twisted.python import log, components, failure
35 from twisted.python import log, components, failure
36 from zope.interface import Interface, implements, Attribute
36 from zope.interface import Interface, implements, Attribute
37
37
38 from IPython.tools import growl
38 from IPython.tools import growl
39 from IPython.kernel.util import printer
39 from IPython.kernel.util import printer
40 from IPython.kernel.twistedutil import gatherBoth
40 from IPython.kernel.twistedutil import gatherBoth
41 from IPython.kernel import map as Map
41 from IPython.kernel import map as Map
42 from IPython.kernel import error
42 from IPython.kernel import error
43 from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase
43 from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase
44 from IPython.kernel.controllerservice import \
44 from IPython.kernel.controllerservice import \
45 ControllerAdapterBase, \
45 ControllerAdapterBase, \
46 ControllerService, \
46 ControllerService, \
47 IControllerBase
47 IControllerBase
48
48
49
49
50 #-------------------------------------------------------------------------------
50 #-------------------------------------------------------------------------------
51 # Interfaces for the MultiEngine representation of a controller
51 # Interfaces for the MultiEngine representation of a controller
52 #-------------------------------------------------------------------------------
52 #-------------------------------------------------------------------------------
53
53
54 class IEngineMultiplexer(Interface):
54 class IEngineMultiplexer(Interface):
55 """Interface to multiple engines implementing IEngineCore/Serialized/Queued.
55 """Interface to multiple engines implementing IEngineCore/Serialized/Queued.
56
56
57 This class simply acts as a multiplexer of methods that are in the
57 This class simply acts as a multiplexer of methods that are in the
58 various IEngines* interfaces. Thus the methods here are just like those
58 various IEngines* interfaces. Thus the methods here are just like those
59 in the IEngine* interfaces, but with an extra first argument, targets.
59 in the IEngine* interfaces, but with an extra first argument, targets.
60 The targets argument can have the following forms:
60 The targets argument can have the following forms:
61
61
62 * targets = 10 # Engines are indexed by ints
62 * targets = 10 # Engines are indexed by ints
63 * targets = [0,1,2,3] # A list of ints
63 * targets = [0,1,2,3] # A list of ints
64 * targets = 'all' # A string to indicate all targets
64 * targets = 'all' # A string to indicate all targets
65
65
66 If targets is bad in any way, an InvalidEngineID will be raised. This
66 If targets is bad in any way, an InvalidEngineID will be raised. This
67 includes engines not being registered.
67 includes engines not being registered.
68
68
69 All IEngineMultiplexer multiplexer methods must return a Deferred to a list
69 All IEngineMultiplexer multiplexer methods must return a Deferred to a list
70 with length equal to the number of targets. The elements of the list will
70 with length equal to the number of targets. The elements of the list will
71 correspond to the return of the corresponding IEngine method.
71 correspond to the return of the corresponding IEngine method.
72
72
73 Failures are aggressive, meaning that if an action fails for any target,
73 Failures are aggressive, meaning that if an action fails for any target,
74 the overall action will fail immediately with that Failure.
74 the overall action will fail immediately with that Failure.
75
75
76 :Parameters:
76 :Parameters:
77 targets : int, list of ints, or 'all'
77 targets : int, list of ints, or 'all'
78 Engine ids the action will apply to.
78 Engine ids the action will apply to.
79
79
80 :Returns: Deferred to a list of results for each engine.
80 :Returns: Deferred to a list of results for each engine.
81
81
82 :Exception:
82 :Exception:
83 InvalidEngineID
83 InvalidEngineID
84 If the targets argument is bad or engines aren't registered.
84 If the targets argument is bad or engines aren't registered.
85 NoEnginesRegistered
85 NoEnginesRegistered
86 If there are no engines registered and targets='all'
86 If there are no engines registered and targets='all'
87 """
87 """
88
88
89 #---------------------------------------------------------------------------
89 #---------------------------------------------------------------------------
90 # Multiplexed methods
90 # Multiplexed methods
91 #---------------------------------------------------------------------------
91 #---------------------------------------------------------------------------
92
92
93 def execute(lines, targets='all'):
93 def execute(lines, targets='all'):
94 """Execute lines of Python code on targets.
94 """Execute lines of Python code on targets.
95
95
96 See the class docstring for information about targets and possible
96 See the class docstring for information about targets and possible
97 exceptions this method can raise.
97 exceptions this method can raise.
98
98
99 :Parameters:
99 :Parameters:
100 lines : str
100 lines : str
101 String of python code to be executed on targets.
101 String of python code to be executed on targets.
102 """
102 """
103
103
104 def push(namespace, targets='all'):
104 def push(namespace, targets='all'):
105 """Push dict namespace into the user's namespace on targets.
105 """Push dict namespace into the user's namespace on targets.
106
106
107 See the class docstring for information about targets and possible
107 See the class docstring for information about targets and possible
108 exceptions this method can raise.
108 exceptions this method can raise.
109
109
110 :Parameters:
110 :Parameters:
111 namespace : dict
111 namespace : dict
112 Dict of key value pairs to be put into the user's namespace.
112 Dict of key value pairs to be put into the user's namespace.
113 """
113 """
114
114
115 def pull(keys, targets='all'):
115 def pull(keys, targets='all'):
116 """Pull values out of the user's namespace on targets by keys.
116 """Pull values out of the user's namespace on targets by keys.
117
117
118 See the class docstring for information about targets and possible
118 See the class docstring for information about targets and possible
119 exceptions this method can raise.
119 exceptions this method can raise.
120
120
121 :Parameters:
121 :Parameters:
122 keys : tuple of strings
122 keys : tuple of strings
123 Sequence of keys to be pulled from user's namespace.
123 Sequence of keys to be pulled from user's namespace.
124 """
124 """
125
125
126 def push_function(namespace, targets='all'):
126 def push_function(namespace, targets='all'):
127 """"""
127 """"""
128
128
129 def pull_function(keys, targets='all'):
129 def pull_function(keys, targets='all'):
130 """"""
130 """"""
131
131
132 def get_result(i=None, targets='all'):
132 def get_result(i=None, targets='all'):
133 """Get the result for command i from targets.
133 """Get the result for command i from targets.
134
134
135 See the class docstring for information about targets and possible
135 See the class docstring for information about targets and possible
136 exceptions this method can raise.
136 exceptions this method can raise.
137
137
138 :Parameters:
138 :Parameters:
139 i : int or None
139 i : int or None
140 Command index or None to indicate most recent command.
140 Command index or None to indicate most recent command.
141 """
141 """
142
142
143 def reset(targets='all'):
143 def reset(targets='all'):
144 """Reset targets.
144 """Reset targets.
145
145
146 This clears the users namespace of the Engines, but won't cause
146 This clears the users namespace of the Engines, but won't cause
147 modules to be reloaded.
147 modules to be reloaded.
148 """
148 """
149
149
150 def keys(targets='all'):
150 def keys(targets='all'):
151 """Get variable names defined in user's namespace on targets."""
151 """Get variable names defined in user's namespace on targets."""
152
152
153 def kill(controller=False, targets='all'):
153 def kill(controller=False, targets='all'):
154 """Kill the targets Engines and possibly the controller.
154 """Kill the targets Engines and possibly the controller.
155
155
156 :Parameters:
156 :Parameters:
157 controller : boolean
157 controller : boolean
158 Should the controller be killed as well? If so, all the
158 Should the controller be killed as well? If so, all the
159 engines will be killed first, no matter what targets is.
159 engines will be killed first, no matter what targets is.
160 """
160 """
161
161
162 def push_serialized(namespace, targets='all'):
162 def push_serialized(namespace, targets='all'):
163 """Push a namespace of Serialized objects to targets.
163 """Push a namespace of Serialized objects to targets.
164
164
165 :Parameters:
165 :Parameters:
166 namespace : dict
166 namespace : dict
167 A dict whose keys are the variable names and whose values
167 A dict whose keys are the variable names and whose values
168 are serialized versions of the objects.
168 are serialized versions of the objects.
169 """
169 """
170
170
171 def pull_serialized(keys, targets='all'):
171 def pull_serialized(keys, targets='all'):
172 """Pull Serialized objects by keys from targets.
172 """Pull Serialized objects by keys from targets.
173
173
174 :Parameters:
174 :Parameters:
175 keys : tuple of strings
175 keys : tuple of strings
176 Sequence of variable names to pull as serialized objects.
176 Sequence of variable names to pull as serialized objects.
177 """
177 """
178
178
179 def clear_queue(targets='all'):
179 def clear_queue(targets='all'):
180 """Clear the queue of pending command for targets."""
180 """Clear the queue of pending command for targets."""
181
181
182 def queue_status(targets='all'):
182 def queue_status(targets='all'):
183 """Get the status of the queue on the targets."""
183 """Get the status of the queue on the targets."""
184
184
185 def set_properties(properties, targets='all'):
185 def set_properties(properties, targets='all'):
186 """set properties by key and value"""
186 """set properties by key and value"""
187
187
188 def get_properties(keys=None, targets='all'):
188 def get_properties(keys=None, targets='all'):
189 """get a list of properties by `keys`, if no keys specified, get all"""
189 """get a list of properties by `keys`, if no keys specified, get all"""
190
190
191 def del_properties(keys, targets='all'):
191 def del_properties(keys, targets='all'):
192 """delete properties by `keys`"""
192 """delete properties by `keys`"""
193
193
194 def has_properties(keys, targets='all'):
194 def has_properties(keys, targets='all'):
195 """get a list of bool values for whether `properties` has `keys`"""
195 """get a list of bool values for whether `properties` has `keys`"""
196
196
197 def clear_properties(targets='all'):
197 def clear_properties(targets='all'):
198 """clear the properties dict"""
198 """clear the properties dict"""
199
199
200
200
201 class IMultiEngine(IEngineMultiplexer):
201 class IMultiEngine(IEngineMultiplexer):
202 """A controller that exposes an explicit interface to all of its engines.
202 """A controller that exposes an explicit interface to all of its engines.
203
203
204 This is the primary interface for interactive usage.
204 This is the primary interface for interactive usage.
205 """
205 """
206
206
207 def get_ids():
207 def get_ids():
208 """Return list of currently registered ids.
208 """Return list of currently registered ids.
209
209
210 :Returns: A Deferred to a list of registered engine ids.
210 :Returns: A Deferred to a list of registered engine ids.
211 """
211 """
212
212
213
213
214
214
215 #-------------------------------------------------------------------------------
215 #-------------------------------------------------------------------------------
216 # Implementation of the core MultiEngine classes
216 # Implementation of the core MultiEngine classes
217 #-------------------------------------------------------------------------------
217 #-------------------------------------------------------------------------------
218
218
219 class MultiEngine(ControllerAdapterBase):
219 class MultiEngine(ControllerAdapterBase):
220 """The representation of a ControllerService as a IMultiEngine.
220 """The representation of a ControllerService as a IMultiEngine.
221
221
222 Although it is not implemented currently, this class would be where a
222 Although it is not implemented currently, this class would be where a
223 client/notification API is implemented. It could inherit from something
223 client/notification API is implemented. It could inherit from something
224 like results.NotifierParent and then use the notify method to send
224 like results.NotifierParent and then use the notify method to send
225 notifications.
225 notifications.
226 """
226 """
227
227
228 implements(IMultiEngine)
228 implements(IMultiEngine)
229
229
230 def __init__(self, controller):
230 def __init__(self, controller):
231 ControllerAdapterBase.__init__(self, controller)
231 ControllerAdapterBase.__init__(self, controller)
232
232
233 #---------------------------------------------------------------------------
233 #---------------------------------------------------------------------------
234 # Helper methods
234 # Helper methods
235 #---------------------------------------------------------------------------
235 #---------------------------------------------------------------------------
236
236
237 def engineList(self, targets):
237 def engineList(self, targets):
238 """Parse the targets argument into a list of valid engine objects.
238 """Parse the targets argument into a list of valid engine objects.
239
239
240 :Parameters:
240 :Parameters:
241 targets : int, list of ints or 'all'
241 targets : int, list of ints or 'all'
242 The targets argument to be parsed.
242 The targets argument to be parsed.
243
243
244 :Returns: List of engine objects.
244 :Returns: List of engine objects.
245
245
246 :Exception:
246 :Exception:
247 InvalidEngineID
247 InvalidEngineID
248 If targets is not valid or if an engine is not registered.
248 If targets is not valid or if an engine is not registered.
249 """
249 """
250 if isinstance(targets, int):
250 if isinstance(targets, int):
251 if targets not in self.engines.keys():
251 if targets not in self.engines.keys():
252 log.msg("Engine with id %i is not registered" % targets)
252 log.msg("Engine with id %i is not registered" % targets)
253 raise error.InvalidEngineID("Engine with id %i is not registered" % targets)
253 raise error.InvalidEngineID("Engine with id %i is not registered" % targets)
254 else:
254 else:
255 return [self.engines[targets]]
255 return [self.engines[targets]]
256 elif isinstance(targets, (list, tuple)):
256 elif isinstance(targets, (list, tuple)):
257 for id in targets:
257 for id in targets:
258 if id not in self.engines.keys():
258 if id not in self.engines.keys():
259 log.msg("Engine with id %r is not registered" % id)
259 log.msg("Engine with id %r is not registered" % id)
260 raise error.InvalidEngineID("Engine with id %r is not registered" % id)
260 raise error.InvalidEngineID("Engine with id %r is not registered" % id)
261 return map(self.engines.get, targets)
261 return map(self.engines.get, targets)
262 elif targets == 'all':
262 elif targets == 'all':
263 eList = self.engines.values()
263 eList = self.engines.values()
264 if len(eList) == 0:
264 if len(eList) == 0:
265 msg = """There are no engines registered.
265 msg = """There are no engines registered.
266 Check the logs in ~/.ipython/log if you think there should have been."""
266 Check the logs in ~/.ipython/log if you think there should have been."""
267 raise error.NoEnginesRegistered(msg)
267 raise error.NoEnginesRegistered(msg)
268 else:
268 else:
269 return eList
269 return eList
270 else:
270 else:
271 raise error.InvalidEngineID("targets argument is not an int, list of ints or 'all': %r"%targets)
271 raise error.InvalidEngineID("targets argument is not an int, list of ints or 'all': %r"%targets)
272
272
273 def _performOnEngines(self, methodName, *args, **kwargs):
273 def _performOnEngines(self, methodName, *args, **kwargs):
274 """Calls a method on engines and returns deferred to list of results.
274 """Calls a method on engines and returns deferred to list of results.
275
275
276 :Parameters:
276 :Parameters:
277 methodName : str
277 methodName : str
278 Name of the method to be called.
278 Name of the method to be called.
279 targets : int, list of ints, 'all'
279 targets : int, list of ints, 'all'
280 The targets argument to be parsed into a list of engine objects.
280 The targets argument to be parsed into a list of engine objects.
281 args
281 args
282 The positional arguments to be passed to the engines.
282 The positional arguments to be passed to the engines.
283 kwargs
283 kwargs
284 The keyword arguments passed to the method
284 The keyword arguments passed to the method
285
285
286 :Returns: List of deferreds to the results on each engine
286 :Returns: List of deferreds to the results on each engine
287
287
288 :Exception:
288 :Exception:
289 InvalidEngineID
289 InvalidEngineID
290 If the targets argument is bad in any way.
290 If the targets argument is bad in any way.
291 AttributeError
291 AttributeError
292 If the method doesn't exist on one of the engines.
292 If the method doesn't exist on one of the engines.
293 """
293 """
294 targets = kwargs.pop('targets')
294 targets = kwargs.pop('targets')
295 log.msg("Performing %s on %r" % (methodName, targets))
295 log.msg("Performing %s on %r" % (methodName, targets))
296 # log.msg("Performing %s(%r, %r) on %r" % (methodName, args, kwargs, targets))
296 # log.msg("Performing %s(%r, %r) on %r" % (methodName, args, kwargs, targets))
297 # This will and should raise if targets is not valid!
297 # This will and should raise if targets is not valid!
298 engines = self.engineList(targets)
298 engines = self.engineList(targets)
299 dList = []
299 dList = []
300 for e in engines:
300 for e in engines:
301 meth = getattr(e, methodName, None)
301 meth = getattr(e, methodName, None)
302 if meth is not None:
302 if meth is not None:
303 dList.append(meth(*args, **kwargs))
303 dList.append(meth(*args, **kwargs))
304 else:
304 else:
305 raise AttributeError("Engine %i does not have method %s" % (e.id, methodName))
305 raise AttributeError("Engine %i does not have method %s" % (e.id, methodName))
306 return dList
306 return dList
307
307
308 def _performOnEnginesAndGatherBoth(self, methodName, *args, **kwargs):
308 def _performOnEnginesAndGatherBoth(self, methodName, *args, **kwargs):
309 """Called _performOnEngines and wraps result/exception into deferred."""
309 """Called _performOnEngines and wraps result/exception into deferred."""
310 try:
310 try:
311 dList = self._performOnEngines(methodName, *args, **kwargs)
311 dList = self._performOnEngines(methodName, *args, **kwargs)
312 except (error.InvalidEngineID, AttributeError, KeyError, error.NoEnginesRegistered):
312 except (error.InvalidEngineID, AttributeError, KeyError, error.NoEnginesRegistered):
313 return defer.fail(failure.Failure())
313 return defer.fail(failure.Failure())
314 else:
314 else:
315 # Having fireOnOneErrback is causing problems with the determinacy
315 # Having fireOnOneErrback is causing problems with the determinacy
316 # of the system. Basically, once a single engine has errbacked, this
316 # of the system. Basically, once a single engine has errbacked, this
317 # method returns. In some cases, this will cause client to submit
317 # method returns. In some cases, this will cause client to submit
318 # another command. Because the previous command is still running
318 # another command. Because the previous command is still running
319 # on some engines, this command will be queued. When those commands
319 # on some engines, this command will be queued. When those commands
320 # then errback, the second command will raise QueueCleared. Ahhh!
320 # then errback, the second command will raise QueueCleared. Ahhh!
321 d = gatherBoth(dList,
321 d = gatherBoth(dList,
322 fireOnOneErrback=0,
322 fireOnOneErrback=0,
323 consumeErrors=1,
323 consumeErrors=1,
324 logErrors=0)
324 logErrors=0)
325 d.addCallback(error.collect_exceptions, methodName)
325 d.addCallback(error.collect_exceptions, methodName)
326 return d
326 return d
327
327
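Editorial sketch of how this helper is typically driven from the public methods below; `printer` stands in for the small logging callback from IPython.kernel.util, everything else mirrors the code above:
d = self._performOnEnginesAndGatherBoth('execute', 'a = 1', targets='all')
d.addCallback(printer)   # fires with the gathered per-engine results once all engines have answered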
328 #---------------------------------------------------------------------------
328 #---------------------------------------------------------------------------
329 # General IMultiEngine methods
329 # General IMultiEngine methods
330 #---------------------------------------------------------------------------
330 #---------------------------------------------------------------------------
331
331
332 def get_ids(self):
332 def get_ids(self):
333 return defer.succeed(self.engines.keys())
333 return defer.succeed(self.engines.keys())
334
334
335 #---------------------------------------------------------------------------
335 #---------------------------------------------------------------------------
336 # IEngineMultiplexer methods
336 # IEngineMultiplexer methods
337 #---------------------------------------------------------------------------
337 #---------------------------------------------------------------------------
338
338
339 def execute(self, lines, targets='all'):
339 def execute(self, lines, targets='all'):
340 return self._performOnEnginesAndGatherBoth('execute', lines, targets=targets)
340 return self._performOnEnginesAndGatherBoth('execute', lines, targets=targets)
341
341
342 def push(self, ns, targets='all'):
342 def push(self, ns, targets='all'):
343 return self._performOnEnginesAndGatherBoth('push', ns, targets=targets)
343 return self._performOnEnginesAndGatherBoth('push', ns, targets=targets)
344
344
345 def pull(self, keys, targets='all'):
345 def pull(self, keys, targets='all'):
346 return self._performOnEnginesAndGatherBoth('pull', keys, targets=targets)
346 return self._performOnEnginesAndGatherBoth('pull', keys, targets=targets)
347
347
348 def push_function(self, ns, targets='all'):
348 def push_function(self, ns, targets='all'):
349 return self._performOnEnginesAndGatherBoth('push_function', ns, targets=targets)
349 return self._performOnEnginesAndGatherBoth('push_function', ns, targets=targets)
350
350
351 def pull_function(self, keys, targets='all'):
351 def pull_function(self, keys, targets='all'):
352 return self._performOnEnginesAndGatherBoth('pull_function', keys, targets=targets)
352 return self._performOnEnginesAndGatherBoth('pull_function', keys, targets=targets)
353
353
354 def get_result(self, i=None, targets='all'):
354 def get_result(self, i=None, targets='all'):
355 return self._performOnEnginesAndGatherBoth('get_result', i, targets=targets)
355 return self._performOnEnginesAndGatherBoth('get_result', i, targets=targets)
356
356
357 def reset(self, targets='all'):
357 def reset(self, targets='all'):
358 return self._performOnEnginesAndGatherBoth('reset', targets=targets)
358 return self._performOnEnginesAndGatherBoth('reset', targets=targets)
359
359
360 def keys(self, targets='all'):
360 def keys(self, targets='all'):
361 return self._performOnEnginesAndGatherBoth('keys', targets=targets)
361 return self._performOnEnginesAndGatherBoth('keys', targets=targets)
362
362
363 def kill(self, controller=False, targets='all'):
363 def kill(self, controller=False, targets='all'):
364 if controller:
364 if controller:
365 targets = 'all'
365 targets = 'all'
366 d = self._performOnEnginesAndGatherBoth('kill', targets=targets)
366 d = self._performOnEnginesAndGatherBoth('kill', targets=targets)
367 if controller:
367 if controller:
368 log.msg("Killing controller")
368 log.msg("Killing controller")
369 d.addCallback(lambda _: reactor.callLater(2.0, reactor.stop))
369 d.addCallback(lambda _: reactor.callLater(2.0, reactor.stop))
370 # Consume any weird stuff coming back
370 # Consume any weird stuff coming back
371 d.addBoth(lambda _: None)
371 d.addBoth(lambda _: None)
372 return d
372 return d
373
373
374 def push_serialized(self, namespace, targets='all'):
374 def push_serialized(self, namespace, targets='all'):
375 for k, v in namespace.iteritems():
375 for k, v in namespace.iteritems():
376 log.msg("Pushed object %s is %f MB" % (k, v.getDataSize()))
376 log.msg("Pushed object %s is %f MB" % (k, v.getDataSize()))
377 d = self._performOnEnginesAndGatherBoth('push_serialized', namespace, targets=targets)
377 d = self._performOnEnginesAndGatherBoth('push_serialized', namespace, targets=targets)
378 return d
378 return d
379
379
380 def pull_serialized(self, keys, targets='all'):
380 def pull_serialized(self, keys, targets='all'):
381 try:
381 try:
382 dList = self._performOnEngines('pull_serialized', keys, targets=targets)
382 dList = self._performOnEngines('pull_serialized', keys, targets=targets)
383 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
383 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
384 return defer.fail(failure.Failure())
384 return defer.fail(failure.Failure())
385 else:
385 else:
386 for d in dList:
386 for d in dList:
387 d.addCallback(self._logSizes)
387 d.addCallback(self._logSizes)
388 d = gatherBoth(dList,
388 d = gatherBoth(dList,
389 fireOnOneErrback=0,
389 fireOnOneErrback=0,
390 consumeErrors=1,
390 consumeErrors=1,
391 logErrors=0)
391 logErrors=0)
392 d.addCallback(error.collect_exceptions, 'pull_serialized')
392 d.addCallback(error.collect_exceptions, 'pull_serialized')
393 return d
393 return d
394
394
395 def _logSizes(self, listOfSerialized):
395 def _logSizes(self, listOfSerialized):
396 if isinstance(listOfSerialized, (list, tuple)):
396 if isinstance(listOfSerialized, (list, tuple)):
397 for s in listOfSerialized:
397 for s in listOfSerialized:
398 log.msg("Pulled object is %f MB" % s.getDataSize())
398 log.msg("Pulled object is %f MB" % s.getDataSize())
399 else:
399 else:
400 log.msg("Pulled object is %f MB" % listOfSerialized.getDataSize())
400 log.msg("Pulled object is %f MB" % listOfSerialized.getDataSize())
401 return listOfSerialized
401 return listOfSerialized
402
402
403 def clear_queue(self, targets='all'):
403 def clear_queue(self, targets='all'):
404 return self._performOnEnginesAndGatherBoth('clear_queue', targets=targets)
404 return self._performOnEnginesAndGatherBoth('clear_queue', targets=targets)
405
405
406 def queue_status(self, targets='all'):
406 def queue_status(self, targets='all'):
407 log.msg("Getting queue status on %r" % targets)
407 log.msg("Getting queue status on %r" % targets)
408 try:
408 try:
409 engines = self.engineList(targets)
409 engines = self.engineList(targets)
410 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
410 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
411 return defer.fail(failure.Failure())
411 return defer.fail(failure.Failure())
412 else:
412 else:
413 dList = []
413 dList = []
414 for e in engines:
414 for e in engines:
415 dList.append(e.queue_status().addCallback(lambda s:(e.id, s)))
415 dList.append(e.queue_status().addCallback(lambda s:(e.id, s)))
416 d = gatherBoth(dList,
416 d = gatherBoth(dList,
417 fireOnOneErrback=0,
417 fireOnOneErrback=0,
418 consumeErrors=1,
418 consumeErrors=1,
419 logErrors=0)
419 logErrors=0)
420 d.addCallback(error.collect_exceptions, 'queue_status')
420 d.addCallback(error.collect_exceptions, 'queue_status')
421 return d
421 return d
422
422
423 def get_properties(self, keys=None, targets='all'):
423 def get_properties(self, keys=None, targets='all'):
424 log.msg("Getting properties on %r" % targets)
424 log.msg("Getting properties on %r" % targets)
425 try:
425 try:
426 engines = self.engineList(targets)
426 engines = self.engineList(targets)
427 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
427 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
428 return defer.fail(failure.Failure())
428 return defer.fail(failure.Failure())
429 else:
429 else:
430 dList = [e.get_properties(keys) for e in engines]
430 dList = [e.get_properties(keys) for e in engines]
431 d = gatherBoth(dList,
431 d = gatherBoth(dList,
432 fireOnOneErrback=0,
432 fireOnOneErrback=0,
433 consumeErrors=1,
433 consumeErrors=1,
434 logErrors=0)
434 logErrors=0)
435 d.addCallback(error.collect_exceptions, 'get_properties')
435 d.addCallback(error.collect_exceptions, 'get_properties')
436 return d
436 return d
437
437
438 def set_properties(self, properties, targets='all'):
438 def set_properties(self, properties, targets='all'):
439 log.msg("Setting properties on %r" % targets)
439 log.msg("Setting properties on %r" % targets)
440 try:
440 try:
441 engines = self.engineList(targets)
441 engines = self.engineList(targets)
442 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
442 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
443 return defer.fail(failure.Failure())
443 return defer.fail(failure.Failure())
444 else:
444 else:
445 dList = [e.set_properties(properties) for e in engines]
445 dList = [e.set_properties(properties) for e in engines]
446 d = gatherBoth(dList,
446 d = gatherBoth(dList,
447 fireOnOneErrback=0,
447 fireOnOneErrback=0,
448 consumeErrors=1,
448 consumeErrors=1,
449 logErrors=0)
449 logErrors=0)
450 d.addCallback(error.collect_exceptions, 'set_properties')
450 d.addCallback(error.collect_exceptions, 'set_properties')
451 return d
451 return d
452
452
453 def has_properties(self, keys, targets='all'):
453 def has_properties(self, keys, targets='all'):
454 log.msg("Checking properties on %r" % targets)
454 log.msg("Checking properties on %r" % targets)
455 try:
455 try:
456 engines = self.engineList(targets)
456 engines = self.engineList(targets)
457 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
457 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
458 return defer.fail(failure.Failure())
458 return defer.fail(failure.Failure())
459 else:
459 else:
460 dList = [e.has_properties(keys) for e in engines]
460 dList = [e.has_properties(keys) for e in engines]
461 d = gatherBoth(dList,
461 d = gatherBoth(dList,
462 fireOnOneErrback=0,
462 fireOnOneErrback=0,
463 consumeErrors=1,
463 consumeErrors=1,
464 logErrors=0)
464 logErrors=0)
465 d.addCallback(error.collect_exceptions, 'has_properties')
465 d.addCallback(error.collect_exceptions, 'has_properties')
466 return d
466 return d
467
467
468 def del_properties(self, keys, targets='all'):
468 def del_properties(self, keys, targets='all'):
469 log.msg("Deleting properties on %r" % targets)
469 log.msg("Deleting properties on %r" % targets)
470 try:
470 try:
471 engines = self.engineList(targets)
471 engines = self.engineList(targets)
472 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
472 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
473 return defer.fail(failure.Failure())
473 return defer.fail(failure.Failure())
474 else:
474 else:
475 dList = [e.del_properties(keys) for e in engines]
475 dList = [e.del_properties(keys) for e in engines]
476 d = gatherBoth(dList,
476 d = gatherBoth(dList,
477 fireOnOneErrback=0,
477 fireOnOneErrback=0,
478 consumeErrors=1,
478 consumeErrors=1,
479 logErrors=0)
479 logErrors=0)
480 d.addCallback(error.collect_exceptions, 'del_properties')
480 d.addCallback(error.collect_exceptions, 'del_properties')
481 return d
481 return d
482
482
483 def clear_properties(self, targets='all'):
483 def clear_properties(self, targets='all'):
484 log.msg("Clearing properties on %r" % targets)
484 log.msg("Clearing properties on %r" % targets)
485 try:
485 try:
486 engines = self.engineList(targets)
486 engines = self.engineList(targets)
487 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
487 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
488 return defer.fail(failure.Failure())
488 return defer.fail(failure.Failure())
489 else:
489 else:
490 dList = [e.clear_properties() for e in engines]
490 dList = [e.clear_properties() for e in engines]
491 d = gatherBoth(dList,
491 d = gatherBoth(dList,
492 fireOnOneErrback=0,
492 fireOnOneErrback=0,
493 consumeErrors=1,
493 consumeErrors=1,
494 logErrors=0)
494 logErrors=0)
495 d.addCallback(error.collect_exceptions, 'clear_properties')
495 d.addCallback(error.collect_exceptions, 'clear_properties')
496 return d
496 return d
497
497
498
498
499 components.registerAdapter(MultiEngine,
499 components.registerAdapter(MultiEngine,
500 IControllerBase,
500 IControllerBase,
501 IMultiEngine)
501 IMultiEngine)
502
502
503
503
504 #-------------------------------------------------------------------------------
504 #-------------------------------------------------------------------------------
505 # Interfaces for the Synchronous MultiEngine
505 # Interfaces for the Synchronous MultiEngine
506 #-------------------------------------------------------------------------------
506 #-------------------------------------------------------------------------------
507
507
508 class ISynchronousEngineMultiplexer(Interface):
508 class ISynchronousEngineMultiplexer(Interface):
509 pass
509 pass
510
510
511
511
512 class ISynchronousMultiEngine(ISynchronousEngineMultiplexer):
512 class ISynchronousMultiEngine(ISynchronousEngineMultiplexer):
513 """Synchronous, two-phase version of IMultiEngine.
513 """Synchronous, two-phase version of IMultiEngine.
514
514
515 Methods in this interface are identical to those of IMultiEngine, but they
515 Methods in this interface are identical to those of IMultiEngine, but they
516 take one additional argument:
516 take one additional argument:
517
517
518 execute(lines, targets='all') -> execute(lines, targets='all', block=True)
518 execute(lines, targets='all') -> execute(lines, targets='all', block=True)
519
519
520 :Parameters:
520 :Parameters:
521 block : boolean
521 block : boolean
522 Whether the method should return a deferred to a deferredID or the
522 Whether the method should return a deferred to a deferredID or the
523 actual result. If block=False, a deferred to a deferredID is
523 actual result. If block=False, a deferred to a deferredID is
524 returned and the user must call `get_pending_deferred` at a later
524 returned and the user must call `get_pending_deferred` at a later
525 point. If block=True, a deferred to the actual result comes back.
525 point. If block=True, a deferred to the actual result comes back.
526 """
526 """
527 def get_pending_deferred(deferredID, block=True):
527 def get_pending_deferred(deferredID, block=True):
528 """"""
528 """"""
529
529
530 def clear_pending_deferreds():
530 def clear_pending_deferreds():
531 """"""
531 """"""
532
532
533
533
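A hedged sketch of the two-phase pattern this interface describes; `smultiengine` and `printer` stand in for an ISynchronousMultiEngine implementer and the logging helper from IPython.kernel.util:
d = smultiengine.execute('a = 2 + 2', targets='all', block=False)
# With block=False the deferred fires with a deferred_id, not the result.
d.addCallback(lambda did: smultiengine.get_pending_deferred(did, block=True))
d.addCallback(printer)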
534 #-------------------------------------------------------------------------------
534 #-------------------------------------------------------------------------------
535 # Implementation of the Synchronous MultiEngine
535 # Implementation of the Synchronous MultiEngine
536 #-------------------------------------------------------------------------------
536 #-------------------------------------------------------------------------------
537
537
538 class SynchronousMultiEngine(PendingDeferredManager):
538 class SynchronousMultiEngine(PendingDeferredManager):
539 """Adapt an `IMultiEngine` -> `ISynchronousMultiEngine`
539 """Adapt an `IMultiEngine` -> `ISynchronousMultiEngine`
540
540
541 Warning: this class uses a decorator that currently uses **kwargs.
541 Warning: this class uses a decorator that currently uses **kwargs.
542 Because of this, block must be passed as a kwarg, not positionally.
542 Because of this, block must be passed as a kwarg, not positionally.
543 """
543 """
544
544
545 implements(ISynchronousMultiEngine)
545 implements(ISynchronousMultiEngine)
546
546
547 def __init__(self, multiengine):
547 def __init__(self, multiengine):
548 self.multiengine = multiengine
548 self.multiengine = multiengine
549 PendingDeferredManager.__init__(self)
549 PendingDeferredManager.__init__(self)
550
550
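Because of the `two_phase` decorator noted above, `block` has to be supplied as a keyword when calling these methods (illustrative only; `multiengine` is an IMultiEngine implementer created elsewhere):
sme = SynchronousMultiEngine(multiengine)
sme.execute('x = 1', targets='all', block=True)    # ok
# sme.execute('x = 1', 'all', True)                # wrong: block passed positionally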
551 #---------------------------------------------------------------------------
551 #---------------------------------------------------------------------------
552 # Decorated pending deferred methods
552 # Decorated pending deferred methods
553 #---------------------------------------------------------------------------
553 #---------------------------------------------------------------------------
554
554
555 @two_phase
555 @two_phase
556 def execute(self, lines, targets='all'):
556 def execute(self, lines, targets='all'):
557 d = self.multiengine.execute(lines, targets)
557 d = self.multiengine.execute(lines, targets)
558 return d
558 return d
559
559
560 @two_phase
560 @two_phase
561 def push(self, namespace, targets='all'):
561 def push(self, namespace, targets='all'):
562 return self.multiengine.push(namespace, targets)
562 return self.multiengine.push(namespace, targets)
563
563
564 @two_phase
564 @two_phase
565 def pull(self, keys, targets='all'):
565 def pull(self, keys, targets='all'):
566 d = self.multiengine.pull(keys, targets)
566 d = self.multiengine.pull(keys, targets)
567 return d
567 return d
568
568
569 @two_phase
569 @two_phase
570 def push_function(self, namespace, targets='all'):
570 def push_function(self, namespace, targets='all'):
571 return self.multiengine.push_function(namespace, targets)
571 return self.multiengine.push_function(namespace, targets)
572
572
573 @two_phase
573 @two_phase
574 def pull_function(self, keys, targets='all'):
574 def pull_function(self, keys, targets='all'):
575 d = self.multiengine.pull_function(keys, targets)
575 d = self.multiengine.pull_function(keys, targets)
576 return d
576 return d
577
577
578 @two_phase
578 @two_phase
579 def get_result(self, i=None, targets='all'):
579 def get_result(self, i=None, targets='all'):
580 return self.multiengine.get_result(i, targets=targets)
580 return self.multiengine.get_result(i, targets=targets)
581
581
582 @two_phase
582 @two_phase
583 def reset(self, targets='all'):
583 def reset(self, targets='all'):
584 return self.multiengine.reset(targets)
584 return self.multiengine.reset(targets)
585
585
586 @two_phase
586 @two_phase
587 def keys(self, targets='all'):
587 def keys(self, targets='all'):
588 return self.multiengine.keys(targets)
588 return self.multiengine.keys(targets)
589
589
590 @two_phase
590 @two_phase
591 def kill(self, controller=False, targets='all'):
591 def kill(self, controller=False, targets='all'):
592 return self.multiengine.kill(controller, targets)
592 return self.multiengine.kill(controller, targets)
593
593
594 @two_phase
594 @two_phase
595 def push_serialized(self, namespace, targets='all'):
595 def push_serialized(self, namespace, targets='all'):
596 return self.multiengine.push_serialized(namespace, targets)
596 return self.multiengine.push_serialized(namespace, targets)
597
597
598 @two_phase
598 @two_phase
599 def pull_serialized(self, keys, targets='all'):
599 def pull_serialized(self, keys, targets='all'):
600 return self.multiengine.pull_serialized(keys, targets)
600 return self.multiengine.pull_serialized(keys, targets)
601
601
602 @two_phase
602 @two_phase
603 def clear_queue(self, targets='all'):
603 def clear_queue(self, targets='all'):
604 return self.multiengine.clear_queue(targets)
604 return self.multiengine.clear_queue(targets)
605
605
606 @two_phase
606 @two_phase
607 def queue_status(self, targets='all'):
607 def queue_status(self, targets='all'):
608 return self.multiengine.queue_status(targets)
608 return self.multiengine.queue_status(targets)
609
609
610 @two_phase
610 @two_phase
611 def set_properties(self, properties, targets='all'):
611 def set_properties(self, properties, targets='all'):
612 return self.multiengine.set_properties(properties, targets)
612 return self.multiengine.set_properties(properties, targets)
613
613
614 @two_phase
614 @two_phase
615 def get_properties(self, keys=None, targets='all'):
615 def get_properties(self, keys=None, targets='all'):
616 return self.multiengine.get_properties(keys, targets)
616 return self.multiengine.get_properties(keys, targets)
617
617
618 @two_phase
618 @two_phase
619 def has_properties(self, keys, targets='all'):
619 def has_properties(self, keys, targets='all'):
620 return self.multiengine.has_properties(keys, targets)
620 return self.multiengine.has_properties(keys, targets)
621
621
622 @two_phase
622 @two_phase
623 def del_properties(self, keys, targets='all'):
623 def del_properties(self, keys, targets='all'):
624 return self.multiengine.del_properties(keys, targets)
624 return self.multiengine.del_properties(keys, targets)
625
625
626 @two_phase
626 @two_phase
627 def clear_properties(self, targets='all'):
627 def clear_properties(self, targets='all'):
628 return self.multiengine.clear_properties(targets)
628 return self.multiengine.clear_properties(targets)
629
629
630 #---------------------------------------------------------------------------
630 #---------------------------------------------------------------------------
631 # IMultiEngine methods
631 # IMultiEngine methods
632 #---------------------------------------------------------------------------
632 #---------------------------------------------------------------------------
633
633
634 def get_ids(self):
634 def get_ids(self):
635 """Return a list of registered engine ids.
635 """Return a list of registered engine ids.
636
636
637 Never use the two phase block/non-block stuff for this.
637 Never use the two phase block/non-block stuff for this.
638 """
638 """
639 return self.multiengine.get_ids()
639 return self.multiengine.get_ids()
640
640
641
641
642 components.registerAdapter(SynchronousMultiEngine, IMultiEngine, ISynchronousMultiEngine)
642 components.registerAdapter(SynchronousMultiEngine, IMultiEngine, ISynchronousMultiEngine)
643
643
644
644
645 #-------------------------------------------------------------------------------
645 #-------------------------------------------------------------------------------
646 # Various high-level interfaces that can be used as MultiEngine mix-ins
646 # Various high-level interfaces that can be used as MultiEngine mix-ins
647 #-------------------------------------------------------------------------------
647 #-------------------------------------------------------------------------------
648
648
649 #-------------------------------------------------------------------------------
649 #-------------------------------------------------------------------------------
650 # IMultiEngineCoordinator
650 # IMultiEngineCoordinator
651 #-------------------------------------------------------------------------------
651 #-------------------------------------------------------------------------------
652
652
653 class IMultiEngineCoordinator(Interface):
653 class IMultiEngineCoordinator(Interface):
654 """Methods that work on multiple engines explicitly."""
654 """Methods that work on multiple engines explicitly."""
655
655
656 def scatter(key, seq, style='basic', flatten=False, targets='all'):
657 """Partition and distribute a sequence to targets.
658 
659 :Parameters:
660 key : str
661 The variable name to call the scattered sequence.
662 seq : list, tuple, array
663 The sequence to scatter. The type should be preserved.
664 style : string
665 A specification of how the sequence is partitioned. Currently
666 only 'basic' is implemented.
667 flatten : boolean
668 Should single element sequences be converted to scalars.
669 """
670 
671 def gather(key, style='basic', targets='all'):
672 """Gather object key from targets.
673 
674 :Parameters:
675 key : string
676 The name of a sequence on the targets to gather.
677 style : string
678 A specification of how the sequence is partitioned. Currently
679 only 'basic' is implemented.
680 """
681 
682 def map(func, seq, style='basic', targets='all'):
683 """A parallelized version of Python's builtin map.
684 
685 This function implements the following pattern:
686 
687 1. The sequence seq is scattered to the given targets.
688 2. map(functionSource, seq) is called on each engine.
689 3. The resulting sequences are gathered back to the local machine.
690 
691 :Parameters:
692 targets : int, list or 'all'
693 The engine ids the action will apply to. Call `get_ids` to see
694 a list of currently available engines.
695 func : str, function
696 An actual function object or a Python string that names a
697 callable defined on the engines.
698 seq : list, tuple or numpy array
699 The local sequence to be scattered.
700 style : str
701 Only 'basic' is supported for now.
702 
703 :Returns: A list of len(seq) with functionSource called on each element
704 of seq.
705 
706 Example
707 =======
708 
709 >>> rc.mapAll('lambda x: x*x', range(10000))
710 [0,2,4,9,25,36,...]
711 """
656 def scatter(key, seq, dist='b', flatten=False, targets='all'):
657 """Partition and distribute a sequence to targets."""
658 
659 def gather(key, dist='b', targets='all'):
660 """Gather object key from targets."""
661 
662 def raw_map(func, seqs, dist='b', targets='all'):
663 """
664 A parallelized version of Python's builtin `map` function.
665 
666 This has a slightly different syntax than the builtin `map`.
667 This is needed because we need to have keyword arguments and thus
668 can't use *args to capture all the sequences. Instead, they must
669 be passed in a list or tuple.
670 
671 The equivalence is:
672 
673 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
674 
675 Most users will want to use parallel functions or the `mapper`
676 and `map` methods for an API that follows that of the builtin
677 `map`.
678 """
712
679
713
680
714 class ISynchronousMultiEngineCoordinator(IMultiEngineCoordinator):
681 class ISynchronousMultiEngineCoordinator(IMultiEngineCoordinator):
715 """Methods that work on multiple engines explicitly."""
682 """Methods that work on multiple engines explicitly."""
716 pass
683
684 def scatter(key, seq, dist='b', flatten=False, targets='all', block=True):
685 """Partition and distribute a sequence to targets."""
686
687 def gather(key, dist='b', targets='all', block=True):
688 """Gather object key from targets"""
689
690 def raw_map(func, seqs, dist='b', targets='all', block=True):
691 """
692 A parallelized version of Python's builtin map.
693
694 This has a slightly different syntax than the builtin `map`.
695 This is needed because we need to have keyword arguments and thus
696 can't use *args to capture all the sequences. Instead, they must
697 be passed in a list or tuple.
698
699 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
700
701 Most users will want to use parallel functions or the `mapper`
702 and `map` methods for an API that follows that of the builtin
703 `map`.
704 """
717
705
718
706
719 #-------------------------------------------------------------------------------
707 #-------------------------------------------------------------------------------
720 # IMultiEngineExtras
708 # IMultiEngineExtras
721 #-------------------------------------------------------------------------------
709 #-------------------------------------------------------------------------------
722
710
723 class IMultiEngineExtras(Interface):
711 class IMultiEngineExtras(Interface):
724
712
725 def zip_pull(targets, *keys):
726 """Pull, but return results in a different format from `pull`.
727 
728 This method basically returns zip(pull(targets, *keys)), with a few
729 edge cases handled differently. Users of chainsaw will find this format
730 familiar.
731 
732 :Parameters:
733 targets : int, list or 'all'
734 The engine ids the action will apply to. Call `get_ids` to see
735 a list of currently available engines.
736 keys: list or tuple of str
737 A list of variable names as string of the Python objects to be pulled
738 back to the client.
739 
740 :Returns: A list of pulled Python objects for each target.
741 """
742 
743 def run(targets, fname):
744 """Run a .py file on targets.
745 
746 :Parameters:
747 targets : int, list or 'all'
748 The engine ids the action will apply to. Call `get_ids` to see
749 a list of currently available engines.
750 fname : str
751 The filename of a .py file on the local system to be sent to and run
752 on the engines.
753 block : boolean
754 Should I block or not. If block=True, wait for the action to
755 complete and return the result. If block=False, return a
756 `PendingResult` object that can be used to later get the
757 result. If block is not specified, the block attribute
758 will be used instead.
759 """
760 
761 
713 def zip_pull(targets, keys):
714 """
715 Pull, but return results in a different format from `pull`.
716 
717 This method basically returns zip(pull(targets, *keys)), with a few
718 edge cases handled differently. Users of chainsaw will find this format
719 familiar.
720 """
721 
722 def run(targets, fname):
723 """Run a .py file on targets."""
724 
725 
762 class ISynchronousMultiEngineExtras(IMultiEngineExtras):
726 class ISynchronousMultiEngineExtras(IMultiEngineExtras):
763 pass
727 def zip_pull(targets, keys, block=True):
728 """
729 Pull, but return results in a different format from `pull`.
730
731 This method basically returns zip(pull(targets, *keys)), with a few
732 edge cases handled differently. Users of chainsaw will find this format
733 familiar.
734 """
764
735
736 def run(targets, fname, block=True):
737 """Run a .py file on targets."""
765
738
766 #-------------------------------------------------------------------------------
739 #-------------------------------------------------------------------------------
767 # The full MultiEngine interface
740 # The full MultiEngine interface
768 #-------------------------------------------------------------------------------
741 #-------------------------------------------------------------------------------
769
742
770 class IFullMultiEngine(IMultiEngine,
743 class IFullMultiEngine(IMultiEngine,
771 IMultiEngineCoordinator,
744 IMultiEngineCoordinator,
772 IMultiEngineExtras):
745 IMultiEngineExtras):
773 pass
746 pass
774
747
775
748
776 class IFullSynchronousMultiEngine(ISynchronousMultiEngine,
749 class IFullSynchronousMultiEngine(ISynchronousMultiEngine,
777 ISynchronousMultiEngineCoordinator,
750 ISynchronousMultiEngineCoordinator,
778 ISynchronousMultiEngineExtras):
751 ISynchronousMultiEngineExtras):
779 pass
752 pass
780
753
@@ -1,840 +1,896 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.test.test_multiengineclient -*-
2 # -*- test-case-name: IPython.kernel.test.test_multiengineclient -*-
3
3
4 """General Classes for IMultiEngine clients."""
4 """General Classes for IMultiEngine clients."""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18
18
19 import sys
19 import sys
20 import cPickle as pickle
20 import cPickle as pickle
21 from types import FunctionType
21 from types import FunctionType
22 import linecache
22 import linecache
23
23
24 from twisted.internet import reactor
24 from twisted.internet import reactor
25 from twisted.python import components, log
25 from twisted.python import components, log
26 from twisted.python.failure import Failure
26 from twisted.python.failure import Failure
27 from zope.interface import Interface, implements, Attribute
27 from zope.interface import Interface, implements, Attribute
28
28
29 from IPython.ColorANSI import TermColors
29 from IPython.ColorANSI import TermColors
30
30
31 from IPython.kernel.twistedutil import blockingCallFromThread
31 from IPython.kernel.twistedutil import blockingCallFromThread
32 from IPython.kernel import error
32 from IPython.kernel import error
33 from IPython.kernel.parallelfunction import ParallelFunction
33 from IPython.kernel.parallelfunction import ParallelFunction
34 from IPython.kernel.mapper import (
35 MultiEngineMapper,
36 IMultiEngineMapperFactory,
37 IMapper
38 )
34 from IPython.kernel import map as Map
39 from IPython.kernel import map as Map
35 from IPython.kernel import multiengine as me
40 from IPython.kernel import multiengine as me
36 from IPython.kernel.multiengine import (IFullMultiEngine,
41 from IPython.kernel.multiengine import (IFullMultiEngine,
37 IFullSynchronousMultiEngine)
42 IFullSynchronousMultiEngine)
38
43
39
44
40 #-------------------------------------------------------------------------------
45 #-------------------------------------------------------------------------------
41 # Pending Result things
46 # Pending Result things
42 #-------------------------------------------------------------------------------
47 #-------------------------------------------------------------------------------
43
48
44 class IPendingResult(Interface):
49 class IPendingResult(Interface):
45 """A representation of a result that is pending.
50 """A representation of a result that is pending.
46
51
47 This class is similar to Twisted's `Deferred` object, but is designed to be
52 This class is similar to Twisted's `Deferred` object, but is designed to be
48 used in a synchronous context.
53 used in a synchronous context.
49 """
54 """
50
55
51 result_id=Attribute("ID of the deferred on the other side")
56 result_id=Attribute("ID of the deferred on the other side")
52 client=Attribute("A client that I came from")
57 client=Attribute("A client that I came from")
53 r=Attribute("An attribute that is a property that calls and returns get_result")
58 r=Attribute("An attribute that is a property that calls and returns get_result")
54
59
55 def get_result(default=None, block=True):
60 def get_result(default=None, block=True):
56 """
61 """
57 Get a result that is pending.
62 Get a result that is pending.
58
63
59 :Parameters:
64 :Parameters:
60 default
65 default
61 The value to return if the result is not ready.
66 The value to return if the result is not ready.
62 block : boolean
67 block : boolean
63 Should I block for the result.
68 Should I block for the result.
64
69
65 :Returns: The actual result or the default value.
70 :Returns: The actual result or the default value.
66 """
71 """
67
72
68 def add_callback(f, *args, **kwargs):
73 def add_callback(f, *args, **kwargs):
69 """
74 """
70 Add a callback that is called with the result.
75 Add a callback that is called with the result.
71
76
72 If the original result is foo, adding a callback will cause
77 If the original result is foo, adding a callback will cause
73 f(foo, *args, **kwargs) to be returned instead. If multiple
78 f(foo, *args, **kwargs) to be returned instead. If multiple
74 callbacks are registered, they are chained together: the result of
79 callbacks are registered, they are chained together: the result of
75 one is passed to the next and so on.
80 one is passed to the next and so on.
76
81
77 Unlike Twisted's Deferred object, there is no errback chain. Thus
82 Unlike Twisted's Deferred object, there is no errback chain. Thus
78 any exception raised will not be caught and handled. User must
83 any exception raised will not be caught and handled. User must
79 catch these by hand when calling `get_result`.
84 catch these by hand when calling `get_result`.
80 """
85 """
81
86
82
87
83 class PendingResult(object):
88 class PendingResult(object):
84 """A representation of a result that is not yet ready.
89 """A representation of a result that is not yet ready.
85
90
86 A user should not create a `PendingResult` instance by hand.
91 A user should not create a `PendingResult` instance by hand.
87
92
88 Methods
93 Methods
89 =======
94 =======
90
95
91 * `get_result`
96 * `get_result`
92 * `add_callback`
97 * `add_callback`
93
98
94 Properties
99 Properties
95 ==========
100 ==========
96 * `r`
101 * `r`
97 """
102 """
98
103
99 def __init__(self, client, result_id):
104 def __init__(self, client, result_id):
100 """Create a PendingResult with a result_id and a client instance.
105 """Create a PendingResult with a result_id and a client instance.
101
106
102 The client should implement `_getPendingResult(result_id, block)`.
107 The client should implement `_getPendingResult(result_id, block)`.
103 """
108 """
104 self.client = client
109 self.client = client
105 self.result_id = result_id
110 self.result_id = result_id
106 self.called = False
111 self.called = False
107 self.raised = False
112 self.raised = False
108 self.callbacks = []
113 self.callbacks = []
109
114
110 def get_result(self, default=None, block=True):
115 def get_result(self, default=None, block=True):
111 """Get a result that is pending.
116 """Get a result that is pending.
112
117
113 This method will connect to an IMultiEngine adapted controller
118 This method will connect to an IMultiEngine adapted controller
114 and see if the result is ready. If the action triggers an exception
119 and see if the result is ready. If the action triggers an exception
115 raise it and record it. This method records the result/exception once it is
120 raise it and record it. This method records the result/exception once it is
116 retrieved. Calling `get_result` again will get this cached result or will
121 retrieved. Calling `get_result` again will get this cached result or will
117 re-raise the exception. The .r attribute is a property that calls
122 re-raise the exception. The .r attribute is a property that calls
118 `get_result` with block=True.
123 `get_result` with block=True.
119
124
120 :Parameters:
125 :Parameters:
121 default
126 default
122 The value to return if the result is not ready.
127 The value to return if the result is not ready.
123 block : boolean
128 block : boolean
124 Should I block for the result.
129 Should I block for the result.
125
130
126 :Returns: The actual result or the default value.
131 :Returns: The actual result or the default value.
127 """
132 """
128
133
129 if self.called:
134 if self.called:
130 if self.raised:
135 if self.raised:
131 raise self.result[0], self.result[1], self.result[2]
136 raise self.result[0], self.result[1], self.result[2]
132 else:
137 else:
133 return self.result
138 return self.result
134 try:
139 try:
135 result = self.client.get_pending_deferred(self.result_id, block)
140 result = self.client.get_pending_deferred(self.result_id, block)
136 except error.ResultNotCompleted:
141 except error.ResultNotCompleted:
137 return default
142 return default
138 except:
143 except:
139 # Reraise other errors, but first record them so they can be reraised
144 # Reraise other errors, but first record them so they can be reraised
140 # later if .r or get_result is called again.
145 # later if .r or get_result is called again.
141 self.result = sys.exc_info()
146 self.result = sys.exc_info()
142 self.called = True
147 self.called = True
143 self.raised = True
148 self.raised = True
144 raise
149 raise
145 else:
150 else:
146 for cb in self.callbacks:
151 for cb in self.callbacks:
147 result = cb[0](result, *cb[1], **cb[2])
152 result = cb[0](result, *cb[1], **cb[2])
148 self.result = result
153 self.result = result
149 self.called = True
154 self.called = True
150 return result
155 return result
151
156
152 def add_callback(self, f, *args, **kwargs):
157 def add_callback(self, f, *args, **kwargs):
153 """Add a callback that is called with the result.
158 """Add a callback that is called with the result.
154
159
155 If the original result is result, adding a callback will cause
160 If the original result is result, adding a callback will cause
156 f(result, *args, **kwargs) to be returned instead. If multiple
161 f(result, *args, **kwargs) to be returned instead. If multiple
157 callbacks are registered, they are chained together: the result of
162 callbacks are registered, they are chained together: the result of
158 one is passed to the next and so on.
163 one is passed to the next and so on.
159
164
160 Unlike Twisted's Deferred object, there is no errback chain. Thus
165 Unlike Twisted's Deferred object, there is no errback chain. Thus
161 any exception raised will not be caught and handled. User must
166 any exception raised will not be caught and handled. User must
162 catch these by hand when calling `get_result`.
167 catch these by hand when calling `get_result`.
163 """
168 """
164 assert callable(f)
169 assert callable(f)
165 self.callbacks.append((f, args, kwargs))
170 self.callbacks.append((f, args, kwargs))
166
171
167 def __cmp__(self, other):
172 def __cmp__(self, other):
168 if self.result_id < other.result_id:
173 if self.result_id < other.result_id:
169 return -1
174 return -1
170 else:
175 else:
171 return 1
176 return 1
172
177
173 def _get_r(self):
178 def _get_r(self):
174 return self.get_result(block=True)
179 return self.get_result(block=True)
175
180
176 r = property(_get_r)
181 r = property(_get_r)
177 """This property is a shortcut to a `get_result(block=True)`."""
182 """This property is a shortcut to a `get_result(block=True)`."""
178
183
179
184
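A short usage sketch (assumes `mec` is a blocking multiengine client created elsewhere; only the PendingResult calls are the point here):
pr = mec.execute('a = 2 + 2', block=False)            # returns a PendingResult
pr.add_callback(wrapResultList)                        # pretty-print once the result arrives
partial = pr.get_result(default=None, block=False)     # returns the default until the result is ready
full = pr.r                                            # same as get_result(block=True)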
180 #-------------------------------------------------------------------------------
185 #-------------------------------------------------------------------------------
181 # Pretty printing wrappers for certain lists
186 # Pretty printing wrappers for certain lists
182 #-------------------------------------------------------------------------------
187 #-------------------------------------------------------------------------------
183
188
184 class ResultList(list):
189 class ResultList(list):
185 """A subclass of list that pretty prints the output of `execute`/`get_result`."""
190 """A subclass of list that pretty prints the output of `execute`/`get_result`."""
186
191
187 def __repr__(self):
192 def __repr__(self):
188 output = []
193 output = []
194 # These colored prompts were not working on Windows
195 if sys.platform == 'win32':
196 blue = normal = red = green = ''
197 else:
189 blue = TermColors.Blue
198 blue = TermColors.Blue
190 normal = TermColors.Normal
199 normal = TermColors.Normal
191 red = TermColors.Red
200 red = TermColors.Red
192 green = TermColors.Green
201 green = TermColors.Green
193 output.append("<Results List>\n")
202 output.append("<Results List>\n")
194 for cmd in self:
203 for cmd in self:
195 if isinstance(cmd, Failure):
204 if isinstance(cmd, Failure):
196 output.append(cmd)
205 output.append(cmd)
197 else:
206 else:
198 target = cmd.get('id',None)
207 target = cmd.get('id',None)
199 cmd_num = cmd.get('number',None)
208 cmd_num = cmd.get('number',None)
200 cmd_stdin = cmd.get('input',{}).get('translated','No Input')
209 cmd_stdin = cmd.get('input',{}).get('translated','No Input')
201 cmd_stdout = cmd.get('stdout', None)
210 cmd_stdout = cmd.get('stdout', None)
202 cmd_stderr = cmd.get('stderr', None)
211 cmd_stderr = cmd.get('stderr', None)
203 output.append("%s[%i]%s In [%i]:%s %s\n" % \
212 output.append("%s[%i]%s In [%i]:%s %s\n" % \
204 (green, target,
213 (green, target,
205 blue, cmd_num, normal, cmd_stdin))
214 blue, cmd_num, normal, cmd_stdin))
206 if cmd_stdout:
215 if cmd_stdout:
207 output.append("%s[%i]%s Out[%i]:%s %s\n" % \
216 output.append("%s[%i]%s Out[%i]:%s %s\n" % \
208 (green, target,
217 (green, target,
209 red, cmd_num, normal, cmd_stdout))
218 red, cmd_num, normal, cmd_stdout))
210 if cmd_stderr:
219 if cmd_stderr:
211 output.append("%s[%i]%s Err[%i]:\n%s %s" % \
220 output.append("%s[%i]%s Err[%i]:\n%s %s" % \
212 (green, target,
221 (green, target,
213 red, cmd_num, normal, cmd_stderr))
222 red, cmd_num, normal, cmd_stderr))
214 return ''.join(output)
223 return ''.join(output)
215
224
216
225
217 def wrapResultList(result):
226 def wrapResultList(result):
218 """A function that wraps the output of `execute`/`get_result` -> `ResultList`."""
227 """A function that wraps the output of `execute`/`get_result` -> `ResultList`."""
219 if len(result) == 0:
228 if len(result) == 0:
220 result = [result]
229 result = [result]
221 return ResultList(result)
230 return ResultList(result)
222
231
223
232
224 class QueueStatusList(list):
233 class QueueStatusList(list):
225 """A subclass of list that pretty prints the output of `queue_status`."""
234 """A subclass of list that pretty prints the output of `queue_status`."""
226
235
227 def __repr__(self):
236 def __repr__(self):
228 output = []
237 output = []
229 output.append("<Queue Status List>\n")
238 output.append("<Queue Status List>\n")
230 for e in self:
239 for e in self:
231 output.append("Engine: %s\n" % repr(e[0]))
240 output.append("Engine: %s\n" % repr(e[0]))
232 output.append(" Pending: %s\n" % repr(e[1]['pending']))
241 output.append(" Pending: %s\n" % repr(e[1]['pending']))
233 for q in e[1]['queue']:
242 for q in e[1]['queue']:
234 output.append(" Command: %s\n" % repr(q))
243 output.append(" Command: %s\n" % repr(q))
235 return ''.join(output)
244 return ''.join(output)
236
245
237
246
238 #-------------------------------------------------------------------------------
247 #-------------------------------------------------------------------------------
239 # InteractiveMultiEngineClient
248 # InteractiveMultiEngineClient
240 #-------------------------------------------------------------------------------
249 #-------------------------------------------------------------------------------
241
250
242 class InteractiveMultiEngineClient(object):
251 class InteractiveMultiEngineClient(object):
243 """A mixin class that add a few methods to a multiengine client.
252 """A mixin class that add a few methods to a multiengine client.
244
253
245 The methods in this mixin class are designed for interactive usage.
254 The methods in this mixin class are designed for interactive usage.
246 """
255 """
247
256
248 def activate(self):
257 def activate(self):
249 """Make this `MultiEngineClient` active for parallel magic commands.
258 """Make this `MultiEngineClient` active for parallel magic commands.
250
259
251 IPython has a magic command syntax to work with `MultiEngineClient` objects.
260 IPython has a magic command syntax to work with `MultiEngineClient` objects.
252 In a given IPython session there is a single active one. While
261 In a given IPython session there is a single active one. While
253 there can be many `MultiEngineClient` created and used by the user,
262 there can be many `MultiEngineClient` created and used by the user,
254 there is only one active one. The active `MultiEngineClient` is used whenever
263 there is only one active one. The active `MultiEngineClient` is used whenever
255 the magic commands %px and %autopx are used.
264 the magic commands %px and %autopx are used.
256
265
257 The activate() method is called on a given `MultiEngineClient` to make it
266 The activate() method is called on a given `MultiEngineClient` to make it
258 active. Once this has been done, the magic commands can be used.
267 active. Once this has been done, the magic commands can be used.
259 """
268 """
260
269
261 try:
270 try:
262 __IPYTHON__.activeController = self
271 __IPYTHON__.activeController = self
263 except NameError:
272 except NameError:
264 print "The IPython Controller magics only work within IPython."
273 print "The IPython Controller magics only work within IPython."
265
274
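Sketch of the interactive flow described above (run inside an IPython session with a client `mec`):
mec.activate()
# %px import numpy        -> executed on all engines of the active client
# %autopx                 -> toggles automatic parallel execution of typed lines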
266 def __setitem__(self, key, value):
275 def __setitem__(self, key, value):
267 """Add a dictionary interface for pushing/pulling.
276 """Add a dictionary interface for pushing/pulling.
268
277
269 This functions as a shorthand for `push`.
278 This functions as a shorthand for `push`.
270
279
271 :Parameters:
280 :Parameters:
272 key : str
281 key : str
273 What to call the remote object.
282 What to call the remote object.
274 value : object
283 value : object
275 The local Python object to push.
284 The local Python object to push.
276 """
285 """
277 targets, block = self._findTargetsAndBlock()
286 targets, block = self._findTargetsAndBlock()
278 return self.push({key:value}, targets=targets, block=block)
287 return self.push({key:value}, targets=targets, block=block)
279
288
280 def __getitem__(self, key):
289 def __getitem__(self, key):
281 """Add a dictionary interface for pushing/pulling.
290 """Add a dictionary interface for pushing/pulling.
282
291
283 This functions as a shorthand to `pull`.
292 This functions as a shorthand to `pull`.
284
293
285 :Parameters:
294 :Parameters:
286 - `key`: A string representing the key.
295 - `key`: A string representing the key.
287 """
296 """
288 if isinstance(key, str):
297 if isinstance(key, str):
289 targets, block = self._findTargetsAndBlock()
298 targets, block = self._findTargetsAndBlock()
290 return self.pull(key, targets=targets, block=block)
299 return self.pull(key, targets=targets, block=block)
291 else:
300 else:
292 raise TypeError("__getitem__ only takes strs")
301 raise TypeError("__getitem__ only takes strs")
293
302
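The dictionary interface in use (sketch; `mec` stands for a client that carries these mixin methods):
mec['a'] = 10      # shorthand for mec.push({'a': 10})
b = mec['a']       # shorthand for mec.pull('a')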
294 def __len__(self):
303 def __len__(self):
295 """Return the number of available engines."""
304 """Return the number of available engines."""
296 return len(self.get_ids())
305 return len(self.get_ids())
297
306
298 def parallelize(self, func, targets=None, block=None):
299 """Build a `ParallelFunction` object for functionName on engines.
300
301 The returned object will implement a parallel version of functionName
302 that takes a local sequence as its only argument and calls (in
303 parallel) functionName on each element of that sequence. The
304 `ParallelFunction` object has a `targets` attribute that controls
305 which engines the function is run on.
306
307 :Parameters:
308 targets : int, list or 'all'
309 The engine ids the action will apply to. Call `get_ids` to see
310 a list of currently available engines.
311 functionName : str
312 A Python string that names a callable defined on the engines.
313
314 :Returns: A `ParallelFunction` object.
315
316 Examples
317 ========
318
319 >>> psin = rc.parallelize('all','lambda x:sin(x)')
320 >>> psin(range(10000))
321 [0,2,4,9,25,36,...]
322 """
323 targets, block = self._findTargetsAndBlock(targets, block)
324 return ParallelFunction(func, self, targets, block)
325
326 #---------------------------------------------------------------------------
307 #---------------------------------------------------------------------------
327 # Make this a context manager for with
308 # Make this a context manager for with
328 #---------------------------------------------------------------------------
309 #---------------------------------------------------------------------------
329
310
330 def findsource_file(self,f):
311 def findsource_file(self,f):
331 linecache.checkcache()
312 linecache.checkcache()
332 s = findsource(f.f_code)
313 s = findsource(f.f_code)
333 lnum = f.f_lineno
314 lnum = f.f_lineno
334 wsource = s[0][f.f_lineno:]
315 wsource = s[0][f.f_lineno:]
335 return strip_whitespace(wsource)
316 return strip_whitespace(wsource)
336
317
337 def findsource_ipython(self,f):
318 def findsource_ipython(self,f):
338 from IPython import ipapi
319 from IPython import ipapi
339 self.ip = ipapi.get()
320 self.ip = ipapi.get()
340 wsource = [l+'\n' for l in
321 wsource = [l+'\n' for l in
341 self.ip.IP.input_hist_raw[-1].splitlines()[1:]]
322 self.ip.IP.input_hist_raw[-1].splitlines()[1:]]
342 return strip_whitespace(wsource)
323 return strip_whitespace(wsource)
343
324
344 def __enter__(self):
325 def __enter__(self):
345 f = sys._getframe(1)
326 f = sys._getframe(1)
346 local_ns = f.f_locals
327 local_ns = f.f_locals
347 global_ns = f.f_globals
328 global_ns = f.f_globals
348 if f.f_code.co_filename == '<ipython console>':
329 if f.f_code.co_filename == '<ipython console>':
349 s = self.findsource_ipython(f)
330 s = self.findsource_ipython(f)
350 else:
331 else:
351 s = self.findsource_file(f)
332 s = self.findsource_file(f)
352
333
353 self._with_context_result = self.execute(s)
334 self._with_context_result = self.execute(s)
354
335
355 def __exit__ (self, etype, value, tb):
336 def __exit__ (self, etype, value, tb):
356 if issubclass(etype,error.StopLocalExecution):
337 if issubclass(etype,error.StopLocalExecution):
357 return True
338 return True
358
339
359
340
360 def remote():
341 def remote():
361 m = 'Special exception to stop local execution of parallel code.'
342 m = 'Special exception to stop local execution of parallel code.'
362 raise error.StopLocalExecution(m)
343 raise error.StopLocalExecution(m)
363
344
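A hedged sketch of the `with`-block usage these hooks and `remote()` enable (`mec` is assumed to be a client using this mixin):
with mec:
    remote()       # raises StopLocalExecution, so the block is not run locally
    a = 2 + 2      # the source after remote() is shipped to the engines via execute()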
364 def strip_whitespace(source):
345 def strip_whitespace(source):
365 # Expand tabs to avoid any confusion.
346 # Expand tabs to avoid any confusion.
366 wsource = [l.expandtabs(4) for l in source]
347 wsource = [l.expandtabs(4) for l in source]
367 # Detect the indentation level
348 # Detect the indentation level
368 done = False
349 done = False
369 for line in wsource:
350 for line in wsource:
370 if line.isspace():
351 if line.isspace():
371 continue
352 continue
372 for col,char in enumerate(line):
353 for col,char in enumerate(line):
373 if char != ' ':
354 if char != ' ':
374 done = True
355 done = True
375 break
356 break
376 if done:
357 if done:
377 break
358 break
378 # Now we know how much leading space there is in the code. Next, we
359 # Now we know how much leading space there is in the code. Next, we
379 # extract up to the first line that has less indentation.
360 # extract up to the first line that has less indentation.
380 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
361 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
381 # detect triple quoted strings that may have flush left text.
362 # detect triple quoted strings that may have flush left text.
382 for lno,line in enumerate(wsource):
363 for lno,line in enumerate(wsource):
383 lead = line[:col]
364 lead = line[:col]
384 if lead.isspace():
365 if lead.isspace():
385 continue
366 continue
386 else:
367 else:
387 if not lead.lstrip().startswith('#'):
368 if not lead.lstrip().startswith('#'):
388 break
369 break
389 # The real 'with' source is up to lno
370 # The real 'with' source is up to lno
390 src_lines = [l[col:] for l in wsource[:lno+1]]
371 src_lines = [l[col:] for l in wsource[:lno+1]]
391
372
392 # Finally, check that the source's first non-comment line begins with the
373 # Finally, check that the source's first non-comment line begins with the
393 # special call 'remote()'
374 # special call 'remote()'
394 for nline,line in enumerate(src_lines):
375 for nline,line in enumerate(src_lines):
395 if line.isspace() or line.startswith('#'):
376 if line.isspace() or line.startswith('#'):
396 continue
377 continue
397 if 'remote()' in line:
378 if 'remote()' in line:
398 break
379 break
399 else:
380 else:
400 raise ValueError('remote() call missing at the start of code')
381 raise ValueError('remote() call missing at the start of code')
401 src = ''.join(src_lines[nline+1:])
382 src = ''.join(src_lines[nline+1:])
402 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
383 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
403 return src
384 return src
404
385
405
386
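# Illustrative usage sketch for the `with`-block support above (assumes a
# blocking multiengine client `mec` that mixes in these context-manager
# methods; `mec` is a hypothetical name, not defined in this module):
#
#     with mec:
#         remote()            # stops local execution of the block
#         a = 10              # these lines run only on the engines
#         b = a * 2
#
# __enter__ extracts the block's source, strip_whitespace drops everything up
# to and including the remote() call, and the remainder is executed on the
# engines; the StopLocalExecution raised locally by remote() is swallowed by
# __exit__.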
406 #-------------------------------------------------------------------------------
387 #-------------------------------------------------------------------------------
407 # The top-level MultiEngine client adaptor
388 # The top-level MultiEngine client adaptor
408 #-------------------------------------------------------------------------------
389 #-------------------------------------------------------------------------------
409
390
410
391
411 class IFullBlockingMultiEngineClient(Interface):
392 class IFullBlockingMultiEngineClient(Interface):
412 pass
393 pass
413
394
414
395
415 class FullBlockingMultiEngineClient(InteractiveMultiEngineClient):
396 class FullBlockingMultiEngineClient(InteractiveMultiEngineClient):
416 """
397 """
417 A blocking client to the `IMultiEngine` controller interface.
398 A blocking client to the `IMultiEngine` controller interface.
418
399
419 This class allows users to use a set of engines for a parallel
400 This class allows users to use a set of engines for a parallel
420 computation through the `IMultiEngine` interface. In this interface,
401 computation through the `IMultiEngine` interface. In this interface,
421 each engine has a specific id (an int) that is used to refer to the
402 each engine has a specific id (an int) that is used to refer to the
422 engine, run code on it, etc.
403 engine, run code on it, etc.
423 """
404 """
424
405
425 implements(IFullBlockingMultiEngineClient)
406 implements(
407 IFullBlockingMultiEngineClient,
408 IMultiEngineMapperFactory,
409 IMapper
410 )
426
411
427 def __init__(self, smultiengine):
412 def __init__(self, smultiengine):
428 self.smultiengine = smultiengine
413 self.smultiengine = smultiengine
429 self.block = True
414 self.block = True
430 self.targets = 'all'
415 self.targets = 'all'
431
416
432 def _findBlock(self, block=None):
417 def _findBlock(self, block=None):
433 if block is None:
418 if block is None:
434 return self.block
419 return self.block
435 else:
420 else:
436 if block in (True, False):
421 if block in (True, False):
437 return block
422 return block
438 else:
423 else:
439 raise ValueError("block must be True or False")
424 raise ValueError("block must be True or False")
440
425
441 def _findTargets(self, targets=None):
426 def _findTargets(self, targets=None):
442 if targets is None:
427 if targets is None:
443 return self.targets
428 return self.targets
444 else:
429 else:
445 if not isinstance(targets, (str,list,tuple,int)):
430 if not isinstance(targets, (str,list,tuple,int)):
446 raise ValueError("targets must be a str, list, tuple or int")
431 raise ValueError("targets must be a str, list, tuple or int")
447 return targets
432 return targets
448
433
449 def _findTargetsAndBlock(self, targets=None, block=None):
434 def _findTargetsAndBlock(self, targets=None, block=None):
450 return self._findTargets(targets), self._findBlock(block)
435 return self._findTargets(targets), self._findBlock(block)
451
436
452 def _blockFromThread(self, function, *args, **kwargs):
437 def _blockFromThread(self, function, *args, **kwargs):
453 block = kwargs.get('block', None)
438 block = kwargs.get('block', None)
454 if block is None:
439 if block is None:
455 raise error.MissingBlockArgument("'block' keyword argument is missing")
440 raise error.MissingBlockArgument("'block' keyword argument is missing")
456 result = blockingCallFromThread(function, *args, **kwargs)
441 result = blockingCallFromThread(function, *args, **kwargs)
457 if not block:
442 if not block:
458 result = PendingResult(self, result)
443 result = PendingResult(self, result)
459 return result
444 return result
460
445
461 def get_pending_deferred(self, deferredID, block):
446 def get_pending_deferred(self, deferredID, block):
462 return blockingCallFromThread(self.smultiengine.get_pending_deferred, deferredID, block)
447 return blockingCallFromThread(self.smultiengine.get_pending_deferred, deferredID, block)
463
448
464 def barrier(self, pendingResults):
449 def barrier(self, pendingResults):
465 """Synchronize a set of `PendingResults`.
450 """Synchronize a set of `PendingResults`.
466
451
467 This method is a synchronization primitive that waits for a set of
452 This method is a synchronization primitive that waits for a set of
468 `PendingResult` objects to complete. More specifically, barrier does
453 `PendingResult` objects to complete. More specifically, barrier does
469 the following.
454 the following.
470
455
471 * The `PendingResult`s are sorted by result_id.
456 * The `PendingResult`s are sorted by result_id.
472 * The `get_result` method is called for each `PendingResult` sequentially
457 * The `get_result` method is called for each `PendingResult` sequentially
473 with block=True.
458 with block=True.
474 * If a `PendingResult` gets a result that is an exception, it is
459 * If a `PendingResult` gets a result that is an exception, it is
475 trapped and can be re-raised later by calling `get_result` again.
460 trapped and can be re-raised later by calling `get_result` again.
476 * The `PendingResult`s are flushed from the controller.
461 * The `PendingResult`s are flushed from the controller.
477
462
478 After barrier has been called on a `PendingResult`, its results can
463 After barrier has been called on a `PendingResult`, its results can
479 be retrieved by calling `get_result` again or accessing the `r` attribute
464 be retrieved by calling `get_result` again or accessing the `r` attribute
480 of the instance.
465 of the instance.
481 """
466 """
482
467
483 # Convert to list for sorting and check class type
468 # Convert to list for sorting and check class type
484 prList = list(pendingResults)
469 prList = list(pendingResults)
485 for pr in prList:
470 for pr in prList:
486 if not isinstance(pr, PendingResult):
471 if not isinstance(pr, PendingResult):
487 raise error.NotAPendingResult("Objects passed to barrier must be PendingResult instances")
472 raise error.NotAPendingResult("Objects passed to barrier must be PendingResult instances")
488
473
489 # Sort the PendingResults so they are in order
474 # Sort the PendingResults so they are in order
490 prList.sort()
475 prList.sort()
491 # Block on each PendingResult object
476 # Block on each PendingResult object
492 for pr in prList:
477 for pr in prList:
493 try:
478 try:
494 result = pr.get_result(block=True)
479 result = pr.get_result(block=True)
495 except Exception:
480 except Exception:
496 pass
481 pass
497
482
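# Illustrative usage sketch for barrier() (assumes a client instance `mec`;
# the variable names are placeholders):
#
#     prs = [mec.execute('x = %d' % i, targets=i, block=False)
#            for i in mec.get_ids()]
#     mec.barrier(prs)          # wait for every PendingResult to complete
#     for pr in prs:
#         print pr.r            # results (or trapped errors) now available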
498 def flush(self):
483 def flush(self):
499 """
484 """
500 Clear all pending deferreds/results from the controller.
485 Clear all pending deferreds/results from the controller.
501
486
502 For each `PendingResult` that is created by this client, the controller
487 For each `PendingResult` that is created by this client, the controller
503 holds on to the result for that `PendingResult`. This can be a problem
488 holds on to the result for that `PendingResult`. This can be a problem
504 if there are a large number of `PendingResult` objects that are created.
489 if there are a large number of `PendingResult` objects that are created.
505
490
506 Once the result of the `PendingResult` has been retrieved, the result
491 Once the result of the `PendingResult` has been retrieved, the result
507 is removed from the controller, but if a user doesn't get a result
492 is removed from the controller, but if a user doesn't get a result
508 (they just ignore the `PendingResult`), the result is kept forever on the
493 (they just ignore the `PendingResult`), the result is kept forever on the
509 controller. This method allows the user to clear out all un-retrieved
494 controller. This method allows the user to clear out all un-retrieved
510 results on the controller.
495 results on the controller.
511 """
496 """
512 r = blockingCallFromThread(self.smultiengine.clear_pending_deferreds)
497 r = blockingCallFromThread(self.smultiengine.clear_pending_deferreds)
513 return r
498 return r
514
499
515 clear_pending_results = flush
500 clear_pending_results = flush
516
501
517 #---------------------------------------------------------------------------
502 #---------------------------------------------------------------------------
518 # IEngineMultiplexer related methods
503 # IEngineMultiplexer related methods
519 #---------------------------------------------------------------------------
504 #---------------------------------------------------------------------------
520
505
521 def execute(self, lines, targets=None, block=None):
506 def execute(self, lines, targets=None, block=None):
522 """
507 """
523 Execute code on a set of engines.
508 Execute code on a set of engines.
524
509
525 :Parameters:
510 :Parameters:
526 lines : str
511 lines : str
527 The Python code to execute as a string
512 The Python code to execute as a string
528 targets : id or list of ids
513 targets : id or list of ids
529 The engine to use for the execution
514 The engine to use for the execution
530 block : boolean
515 block : boolean
531 If True, this method will return the actual result. If False,
516 If True, this method will return the actual result. If False,
532 a `PendingResult` is returned which can be used to get the result
517 a `PendingResult` is returned which can be used to get the result
533 at a later time.
518 at a later time.
534 """
519 """
535 targets, block = self._findTargetsAndBlock(targets, block)
520 targets, block = self._findTargetsAndBlock(targets, block)
536 result = blockingCallFromThread(self.smultiengine.execute, lines,
521 result = blockingCallFromThread(self.smultiengine.execute, lines,
537 targets=targets, block=block)
522 targets=targets, block=block)
538 if block:
523 if block:
539 result = ResultList(result)
524 result = ResultList(result)
540 else:
525 else:
541 result = PendingResult(self, result)
526 result = PendingResult(self, result)
542 result.add_callback(wrapResultList)
527 result.add_callback(wrapResultList)
543 return result
528 return result
544
529
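# Illustrative usage sketch for execute() (client instance `mec` assumed):
#
#     rl = mec.execute('import math; y = math.sqrt(2)')    # block defaults to True
#     pr = mec.execute('z = y + 1', block=False)           # returns a PendingResult
#     rl2 = pr.get_result(block=True)                       # fetch it later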
545 def push(self, namespace, targets=None, block=None):
530 def push(self, namespace, targets=None, block=None):
546 """
531 """
547 Push a dictionary of keys and values to the engines' namespaces.
532 Push a dictionary of keys and values to the engines' namespaces.
548
533
549 Each engine has a persistent namespace. This method is used to push
534 Each engine has a persistent namespace. This method is used to push
550 Python objects into that namespace.
535 Python objects into that namespace.
551
536
552 The objects in the namespace must be pickleable.
537 The objects in the namespace must be pickleable.
553
538
554 :Parameters:
539 :Parameters:
555 namespace : dict
540 namespace : dict
556 A dict that contains Python objects to be injected into
541 A dict that contains Python objects to be injected into
557 the engine persistent namespace.
542 the engine persistent namespace.
558 targets : id or list of ids
543 targets : id or list of ids
559 The engine to use for the execution
544 The engine to use for the execution
560 block : boolean
545 block : boolean
561 If True, this method will return the actual result. If False,
546 If True, this method will return the actual result. If False,
562 a `PendingResult` is returned which can be used to get the result
547 a `PendingResult` is returned which can be used to get the result
563 at a later time.
548 at a later time.
564 """
549 """
565 targets, block = self._findTargetsAndBlock(targets, block)
550 targets, block = self._findTargetsAndBlock(targets, block)
566 return self._blockFromThread(self.smultiengine.push, namespace,
551 return self._blockFromThread(self.smultiengine.push, namespace,
567 targets=targets, block=block)
552 targets=targets, block=block)
568
553
569 def pull(self, keys, targets=None, block=None):
554 def pull(self, keys, targets=None, block=None):
570 """
555 """
571 Pull Python objects by key out of the engines' namespaces.
556 Pull Python objects by key out of the engines' namespaces.
572
557
573 :Parameters:
558 :Parameters:
574 keys : str or list of str
559 keys : str or list of str
575 The names of the variables to be pulled
560 The names of the variables to be pulled
576 targets : id or list of ids
561 targets : id or list of ids
577 The engine to use for the execution
562 The engine to use for the execution
578 block : boolean
563 block : boolean
579 If True, this method will return the actual result. If False,
564 If True, this method will return the actual result. If False,
580 a `PendingResult` is returned which can be used to get the result
565 a `PendingResult` is returned which can be used to get the result
581 at a later time.
566 at a later time.
582 """
567 """
583 targets, block = self._findTargetsAndBlock(targets, block)
568 targets, block = self._findTargetsAndBlock(targets, block)
584 return self._blockFromThread(self.smultiengine.pull, keys, targets=targets, block=block)
569 return self._blockFromThread(self.smultiengine.pull, keys, targets=targets, block=block)
585
570
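# Illustrative usage sketch for push()/pull() (client instance `mec` assumed;
# the exact shape of the pulled result depends on the number of engines):
#
#     mec.push(dict(a=10, b=[1, 2, 3]))      # send objects to every engine
#     mec.execute('c = a + len(b)')
#     print mec.pull('c')                    # e.g. [13, 13, ...], one per engine
#     print mec.pull(['a', 'c'])             # pull several keys at once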
586 def push_function(self, namespace, targets=None, block=None):
571 def push_function(self, namespace, targets=None, block=None):
587 """
572 """
588 Push a Python function to an engine.
573 Push a Python function to an engine.
589
574
590 This method is used to push a Python function to an engine. The
575 This method is used to push a Python function to an engine. The
591 function can then be used in code on the engines. Closures are not supported.
576 function can then be used in code on the engines. Closures are not supported.
592
577
593 :Parameters:
578 :Parameters:
594 namespace : dict
579 namespace : dict
595 A dict whose values are the functions to be pushed. The keys give
580 A dict whose values are the functions to be pushed. The keys give
596 that names that the function will appear as in the engines
581 that names that the function will appear as in the engines
597 namespace.
582 namespace.
598 targets : id or list of ids
583 targets : id or list of ids
599 The engine to use for the execution
584 The engine to use for the execution
600 block : boolean
585 block : boolean
601 If True, this method will return the actual result. If False,
586 If True, this method will return the actual result. If False,
602 a `PendingResult` is returned which can be used to get the result
587 a `PendingResult` is returned which can be used to get the result
603 at a later time.
588 at a later time.
604 """
589 """
605 targets, block = self._findTargetsAndBlock(targets, block)
590 targets, block = self._findTargetsAndBlock(targets, block)
606 return self._blockFromThread(self.smultiengine.push_function, namespace, targets=targets, block=block)
591 return self._blockFromThread(self.smultiengine.push_function, namespace, targets=targets, block=block)
607
592
608 def pull_function(self, keys, targets=None, block=None):
593 def pull_function(self, keys, targets=None, block=None):
609 """
594 """
610 Pull a Python function from an engine.
595 Pull a Python function from an engine.
611
596
612 This method is used to pull a Python function from an engine.
597 This method is used to pull a Python function from an engine.
613 Closures are not supported.
598 Closures are not supported.
614
599
615 :Parameters:
600 :Parameters:
616 keys : str or list of str
601 keys : str or list of str
617 The names of the functions to be pulled
602 The names of the functions to be pulled
618 targets : id or list of ids
603 targets : id or list of ids
619 The engine to use for the execution
604 The engine to use for the execution
620 block : boolean
605 block : boolean
621 If True, this method will return the actual result. If False,
606 If True, this method will return the actual result. If False,
622 a `PendingResult` is returned which can be used to get the result
607 a `PendingResult` is returned which can be used to get the result
623 at a later time.
608 at a later time.
624 """
609 """
625 targets, block = self._findTargetsAndBlock(targets, block)
610 targets, block = self._findTargetsAndBlock(targets, block)
626 return self._blockFromThread(self.smultiengine.pull_function, keys, targets=targets, block=block)
611 return self._blockFromThread(self.smultiengine.pull_function, keys, targets=targets, block=block)
627
612
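# Illustrative usage sketch for push_function()/pull_function() (client
# instance `mec` assumed):
#
#     def square(x):
#         return x * x
#
#     mec.push_function(dict(square=square))   # define square() on the engines
#     mec.execute('y = square(7)')
#     f = mec.pull_function('square')           # bring the function back locally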
628 def push_serialized(self, namespace, targets=None, block=None):
613 def push_serialized(self, namespace, targets=None, block=None):
629 targets, block = self._findTargetsAndBlock(targets, block)
614 targets, block = self._findTargetsAndBlock(targets, block)
630 return self._blockFromThread(self.smultiengine.push_serialized, namespace, targets=targets, block=block)
615 return self._blockFromThread(self.smultiengine.push_serialized, namespace, targets=targets, block=block)
631
616
632 def pull_serialized(self, keys, targets=None, block=None):
617 def pull_serialized(self, keys, targets=None, block=None):
633 targets, block = self._findTargetsAndBlock(targets, block)
618 targets, block = self._findTargetsAndBlock(targets, block)
634 return self._blockFromThread(self.smultiengine.pull_serialized, keys, targets=targets, block=block)
619 return self._blockFromThread(self.smultiengine.pull_serialized, keys, targets=targets, block=block)
635
620
636 def get_result(self, i=None, targets=None, block=None):
621 def get_result(self, i=None, targets=None, block=None):
637 """
622 """
638 Get a previous result.
623 Get a previous result.
639
624
640 When code is executed in an engine, a dict is created and returned. This
625 When code is executed in an engine, a dict is created and returned. This
641 method retrieves that dict for previous commands.
626 method retrieves that dict for previous commands.
642
627
643 :Parameters:
628 :Parameters:
644 i : int
629 i : int
645 The number of the result to get
630 The number of the result to get
646 targets : id or list of ids
631 targets : id or list of ids
647 The engine to use for the execution
632 The engine to use for the execution
648 block : boolean
633 block : boolean
649 If True, this method will return the actual result. If False,
634 If True, this method will return the actual result. If False,
650 a `PendingResult` is returned which can be used to get the result
635 a `PendingResult` is returned which can be used to get the result
651 at a later time.
636 at a later time.
652 """
637 """
653 targets, block = self._findTargetsAndBlock(targets, block)
638 targets, block = self._findTargetsAndBlock(targets, block)
654 result = blockingCallFromThread(self.smultiengine.get_result, i, targets=targets, block=block)
639 result = blockingCallFromThread(self.smultiengine.get_result, i, targets=targets, block=block)
655 if block:
640 if block:
656 result = ResultList(result)
641 result = ResultList(result)
657 else:
642 else:
658 result = PendingResult(self, result)
643 result = PendingResult(self, result)
659 result.add_callback(wrapResultList)
644 result.add_callback(wrapResultList)
660 return result
645 return result
661
646
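# Illustrative usage sketch for get_result() (client instance `mec` assumed):
#
#     mec.execute('x = 1')
#     mec.execute('x = x + 1')
#     print mec.get_result()      # result dict(s); with no argument the latest is returned
#     print mec.get_result(0)     # or ask for a specific command number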
662 def reset(self, targets=None, block=None):
647 def reset(self, targets=None, block=None):
663 """
648 """
664 Reset an engine.
649 Reset an engine.
665
650
666 This method clears out the namespace of an engine.
651 This method clears out the namespace of an engine.
667
652
668 :Parameters:
653 :Parameters:
669 targets : id or list of ids
654 targets : id or list of ids
670 The engine to use for the execution
655 The engine to use for the execution
671 block : boolean
656 block : boolean
672 If True, this method will return the actual result. If False,
657 If True, this method will return the actual result. If False,
673 a `PendingResult` is returned which can be used to get the result
658 a `PendingResult` is returned which can be used to get the result
674 at a later time.
659 at a later time.
675 """
660 """
676 targets, block = self._findTargetsAndBlock(targets, block)
661 targets, block = self._findTargetsAndBlock(targets, block)
677 return self._blockFromThread(self.smultiengine.reset, targets=targets, block=block)
662 return self._blockFromThread(self.smultiengine.reset, targets=targets, block=block)
678
663
679 def keys(self, targets=None, block=None):
664 def keys(self, targets=None, block=None):
680 """
665 """
681 Get a list of all the variables in an engine's namespace.
666 Get a list of all the variables in an engine's namespace.
682
667
683 :Parameters:
668 :Parameters:
684 targets : id or list of ids
669 targets : id or list of ids
685 The engine to use for the execution
670 The engine to use for the execution
686 block : boolean
671 block : boolean
687 If True, this method will return the actual result. If False,
672 If True, this method will return the actual result. If False,
688 a `PendingResult` is returned which can be used to get the result
673 a `PendingResult` is returned which can be used to get the result
689 at a later time.
674 at a later time.
690 """
675 """
691 targets, block = self._findTargetsAndBlock(targets, block)
676 targets, block = self._findTargetsAndBlock(targets, block)
692 return self._blockFromThread(self.smultiengine.keys, targets=targets, block=block)
677 return self._blockFromThread(self.smultiengine.keys, targets=targets, block=block)
693
678
694 def kill(self, controller=False, targets=None, block=None):
679 def kill(self, controller=False, targets=None, block=None):
695 """
680 """
696 Kill the engines and controller.
681 Kill the engines and controller.
697
682
698 This method is used to stop the engines (and optionally the controller) by calling
683 This method is used to stop the engines (and optionally the controller) by calling
699 `reactor.stop`.
684 `reactor.stop`.
700
685
701 :Parameters:
686 :Parameters:
702 controller : boolean
687 controller : boolean
703 If True, kill the engines and controller. If False, just the
688 If True, kill the engines and controller. If False, just the
704 engines
689 engines
705 targets : id or list of ids
690 targets : id or list of ids
706 The engine to use for the execution
691 The engine to use for the execution
707 block : boolean
692 block : boolean
708 If True, this method will return the actual result. If False,
693 If True, this method will return the actual result. If False,
709 a `PendingResult` is returned which can be used to get the result
694 a `PendingResult` is returned which can be used to get the result
710 at a later time.
695 at a later time.
711 """
696 """
712 targets, block = self._findTargetsAndBlock(targets, block)
697 targets, block = self._findTargetsAndBlock(targets, block)
713 return self._blockFromThread(self.smultiengine.kill, controller, targets=targets, block=block)
698 return self._blockFromThread(self.smultiengine.kill, controller, targets=targets, block=block)
714
699
715 def clear_queue(self, targets=None, block=None):
700 def clear_queue(self, targets=None, block=None):
716 """
701 """
717 Clear out the controller's queue for an engine.
702 Clear out the controller's queue for an engine.
718
703
719 The controller maintains a queue for each engine. This method clears it out.
704 The controller maintains a queue for each engine. This method clears it out.
720
705
721 :Parameters:
706 :Parameters:
722 targets : id or list of ids
707 targets : id or list of ids
723 The engine to use for the execution
708 The engine to use for the execution
724 block : boolean
709 block : boolean
725 If True, this method will return the actual result. If False,
710 If True, this method will return the actual result. If False,
726 a `PendingResult` is returned which can be used to get the result
711 a `PendingResult` is returned which can be used to get the result
727 at a later time.
712 at a later time.
728 """
713 """
729 targets, block = self._findTargetsAndBlock(targets, block)
714 targets, block = self._findTargetsAndBlock(targets, block)
730 return self._blockFromThread(self.smultiengine.clear_queue, targets=targets, block=block)
715 return self._blockFromThread(self.smultiengine.clear_queue, targets=targets, block=block)
731
716
732 def queue_status(self, targets=None, block=None):
717 def queue_status(self, targets=None, block=None):
733 """
718 """
734 Get the status of an engine's queue.
719 Get the status of an engine's queue.
735
720
736 :Parameters:
721 :Parameters:
737 targets : id or list of ids
722 targets : id or list of ids
738 The engine to use for the execution
723 The engine to use for the execution
739 block : boolean
724 block : boolean
740 If True, this method will return the actual result. If False,
725 If True, this method will return the actual result. If False,
741 a `PendingResult` is returned which can be used to get the result
726 a `PendingResult` is returned which can be used to get the result
742 at a later time.
727 at a later time.
743 """
728 """
744 targets, block = self._findTargetsAndBlock(targets, block)
729 targets, block = self._findTargetsAndBlock(targets, block)
745 return self._blockFromThread(self.smultiengine.queue_status, targets=targets, block=block)
730 return self._blockFromThread(self.smultiengine.queue_status, targets=targets, block=block)
746
731
747 def set_properties(self, properties, targets=None, block=None):
732 def set_properties(self, properties, targets=None, block=None):
748 targets, block = self._findTargetsAndBlock(targets, block)
733 targets, block = self._findTargetsAndBlock(targets, block)
749 return self._blockFromThread(self.smultiengine.set_properties, properties, targets=targets, block=block)
734 return self._blockFromThread(self.smultiengine.set_properties, properties, targets=targets, block=block)
750
735
751 def get_properties(self, keys=None, targets=None, block=None):
736 def get_properties(self, keys=None, targets=None, block=None):
752 targets, block = self._findTargetsAndBlock(targets, block)
737 targets, block = self._findTargetsAndBlock(targets, block)
753 return self._blockFromThread(self.smultiengine.get_properties, keys, targets=targets, block=block)
738 return self._blockFromThread(self.smultiengine.get_properties, keys, targets=targets, block=block)
754
739
755 def has_properties(self, keys, targets=None, block=None):
740 def has_properties(self, keys, targets=None, block=None):
756 targets, block = self._findTargetsAndBlock(targets, block)
741 targets, block = self._findTargetsAndBlock(targets, block)
757 return self._blockFromThread(self.smultiengine.has_properties, keys, targets=targets, block=block)
742 return self._blockFromThread(self.smultiengine.has_properties, keys, targets=targets, block=block)
758
743
759 def del_properties(self, keys, targets=None, block=None):
744 def del_properties(self, keys, targets=None, block=None):
760 targets, block = self._findTargetsAndBlock(targets, block)
745 targets, block = self._findTargetsAndBlock(targets, block)
761 return self._blockFromThread(self.smultiengine.del_properties, keys, targets=targets, block=block)
746 return self._blockFromThread(self.smultiengine.del_properties, keys, targets=targets, block=block)
762
747
763 def clear_properties(self, targets=None, block=None):
748 def clear_properties(self, targets=None, block=None):
764 targets, block = self._findTargetsAndBlock(targets, block)
749 targets, block = self._findTargetsAndBlock(targets, block)
765 return self._blockFromThread(self.smultiengine.clear_properties, targets=targets, block=block)
750 return self._blockFromThread(self.smultiengine.clear_properties, targets=targets, block=block)
766
751
767 #---------------------------------------------------------------------------
752 #---------------------------------------------------------------------------
768 # IMultiEngine related methods
753 # IMultiEngine related methods
769 #---------------------------------------------------------------------------
754 #---------------------------------------------------------------------------
770
755
771 def get_ids(self):
756 def get_ids(self):
772 """
757 """
773 Returns the ids of currently registered engines.
758 Returns the ids of currently registered engines.
774 """
759 """
775 result = blockingCallFromThread(self.smultiengine.get_ids)
760 result = blockingCallFromThread(self.smultiengine.get_ids)
776 return result
761 return result
777
762
778 #---------------------------------------------------------------------------
763 #---------------------------------------------------------------------------
779 # IMultiEngineCoordinator
764 # IMultiEngineCoordinator
780 #---------------------------------------------------------------------------
765 #---------------------------------------------------------------------------
781
766
782 def scatter(self, key, seq, style='basic', flatten=False, targets=None, block=None):
767 def scatter(self, key, seq, dist='b', flatten=False, targets=None, block=None):
783 """
768 """
784 Partition a Python sequence and send the partitions to a set of engines.
769 Partition a Python sequence and send the partitions to a set of engines.
785 """
770 """
786 targets, block = self._findTargetsAndBlock(targets, block)
771 targets, block = self._findTargetsAndBlock(targets, block)
787 return self._blockFromThread(self.smultiengine.scatter, key, seq,
772 return self._blockFromThread(self.smultiengine.scatter, key, seq,
788 style, flatten, targets=targets, block=block)
773 dist, flatten, targets=targets, block=block)
789
774
790 def gather(self, key, style='basic', targets=None, block=None):
775 def gather(self, key, dist='b', targets=None, block=None):
791 """
776 """
792 Gather a partitioned sequence on a set of engines as a single local seq.
777 Gather a partitioned sequence on a set of engines as a single local seq.
793 """
778 """
794 targets, block = self._findTargetsAndBlock(targets, block)
779 targets, block = self._findTargetsAndBlock(targets, block)
795 return self._blockFromThread(self.smultiengine.gather, key, style,
780 return self._blockFromThread(self.smultiengine.gather, key, dist,
796 targets=targets, block=block)
781 targets=targets, block=block)
797
782
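# Illustrative usage sketch for scatter()/gather() (client instance `mec`
# assumed):
#
#     mec.scatter('a', range(16))            # partition the list across engines
#     mec.execute('a = [x**2 for x in a]')   # work on each local partition
#     print mec.gather('a')                  # reassemble into one local list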
798 def map(self, func, seq, style='basic', targets=None, block=None):
783 def raw_map(self, func, seq, dist='b', targets=None, block=None):
784 """
785 A parallelized version of Python's builtin map.
786
787 This has a slightly different syntax than the builtin `map`.
788 This is needed because we need to have keyword arguments and thus
789 can't use *args to capture all the sequences. Instead, they must
790 be passed in a list or tuple.
791
792 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
793
794 Most users will want to use parallel functions or the `mapper`
795 and `map` methods for an API that follows that of the builtin
796 `map`.
797 """
798 targets, block = self._findTargetsAndBlock(targets, block)
799 return self._blockFromThread(self.smultiengine.raw_map, func, seq,
800 dist, targets=targets, block=block)
801
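# Illustrative sketch of the raw_map() calling convention described above
# (client instance `mec` assumed):
#
#     def add(x, y):
#         return x + y
#
#     # raw_map takes the sequences bundled in a single list...
#     result = mec.raw_map(add, [range(10), range(10)])
#     # ...which corresponds to the serial map(add, range(10), range(10)).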
802 def map(self, func, *sequences):
803 """
804 A parallel version of Python's builtin `map` function.
805
806 This method applies a function to sequences of arguments. It
807 follows the same syntax as the builtin `map`.
808
809 This method creates a mapper object by calling `self.mapper` with
810 no arguments and then uses that mapper to do the mapping. See
811 the documentation of `mapper` for more details.
812 """
813 return self.mapper().map(func, *sequences)
814
815 def mapper(self, dist='b', targets='all', block=None):
816 """
817 Create a mapper object that has a `map` method.
818
819 This method returns an object that implements the `IMapper`
820 interface. This method is a factory that is used to control how
821 the map happens.
822
823 :Parameters:
824 dist : str
825 What decomposition to use, 'b' is the only one supported
826 currently
827 targets : str, int, sequence of ints
828 Which engines to use for the map
829 block : boolean
830 Should calls to `map` block or not
831 """
832 return MultiEngineMapper(self, dist, targets, block)
833
834 def parallel(self, dist='b', targets=None, block=None):
799 """
835 """
800 A parallelized version of Python's builtin map
836 A decorator that turns a function into a parallel function.
837
838 This can be used as:
839
840 @parallel()
841 def f(x, y):
842 ...
843
844 f(range(10), range(10))
845
846 This causes f(0,0), f(1,1), ... to be called in parallel.
847
848 :Parameters:
849 dist : str
850 What decomposition to use, 'b' is the only one supported
851 currently
852 targets : str, int, sequence of ints
853 Which engines to use for the map
854 block : boolean
855 Should calls to `map` block or not
801 """
856 """
802 targets, block = self._findTargetsAndBlock(targets, block)
857 targets, block = self._findTargetsAndBlock(targets, block)
803 return self._blockFromThread(self.smultiengine.map, func, seq,
858 mapper = self.mapper(dist, targets, block)
804 style, targets=targets, block=block)
859 pf = ParallelFunction(mapper)
860 return pf
805
861
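# Illustrative usage sketch for map()/mapper()/parallel() (client instance
# `mec` assumed; `f` and `g` are placeholder functions):
#
#     def f(x):
#         return 2.0 * x + 1
#
#     r1 = mec.map(f, range(100))        # behaves like the builtin map
#
#     m = mec.mapper(block=False)        # customized, non-blocking mapper
#     pr = m.map(f, range(100))          # result can be retrieved later
#
#     @mec.parallel()
#     def g(x, y):
#         return x * y
#     r2 = g(range(10), range(10))       # g(0,0), g(1,1), ... run in parallel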
806 #---------------------------------------------------------------------------
862 #---------------------------------------------------------------------------
807 # IMultiEngineExtras
863 # IMultiEngineExtras
808 #---------------------------------------------------------------------------
864 #---------------------------------------------------------------------------
809
865
810 def zip_pull(self, keys, targets=None, block=None):
866 def zip_pull(self, keys, targets=None, block=None):
811 targets, block = self._findTargetsAndBlock(targets, block)
867 targets, block = self._findTargetsAndBlock(targets, block)
812 return self._blockFromThread(self.smultiengine.zip_pull, keys,
868 return self._blockFromThread(self.smultiengine.zip_pull, keys,
813 targets=targets, block=block)
869 targets=targets, block=block)
814
870
815 def run(self, filename, targets=None, block=None):
871 def run(self, filename, targets=None, block=None):
816 """
872 """
817 Run Python code from a file on the engines.
873 Run Python code from a file on the engines.
818
874
819 :Parameters:
875 :Parameters:
820 filename : str
876 filename : str
821 The name of the local file to run
877 The name of the local file to run
822 targets : id or list of ids
878 targets : id or list of ids
823 The engine to use for the execution
879 The engine to use for the execution
824 block : boolean
880 block : boolean
825 If True, this method will return the actual result. If False,
881 If True, this method will return the actual result. If False,
826 a `PendingResult` is returned which can be used to get the result
882 a `PendingResult` is returned which can be used to get the result
827 at a later time.
883 at a later time.
828 """
884 """
829 targets, block = self._findTargetsAndBlock(targets, block)
885 targets, block = self._findTargetsAndBlock(targets, block)
830 return self._blockFromThread(self.smultiengine.run, filename,
886 return self._blockFromThread(self.smultiengine.run, filename,
831 targets=targets, block=block)
887 targets=targets, block=block)
832
888
833
889
834
890
835 components.registerAdapter(FullBlockingMultiEngineClient,
891 components.registerAdapter(FullBlockingMultiEngineClient,
836 IFullSynchronousMultiEngine, IFullBlockingMultiEngineClient)
892 IFullSynchronousMultiEngine, IFullBlockingMultiEngineClient)
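# Illustrative sketch of what this adapter registration enables (hypothetical
# variable name; `smultiengine` is assumed to provide IFullSynchronousMultiEngine):
#
#     blocking_client = IFullBlockingMultiEngineClient(smultiengine)
#     # Twisted's component system looks up the registered adapter and returns
#     # a FullBlockingMultiEngineClient wrapping `smultiengine`.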
837
893
838
894
839
895
840
896
@@ -1,668 +1,757 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """
3 """
4 Expose the multiengine controller over the Foolscap network protocol.
4 Expose the multiengine controller over the Foolscap network protocol.
5 """
5 """
6
6
7 __docformat__ = "restructuredtext en"
7 __docformat__ = "restructuredtext en"
8
8
9 #-------------------------------------------------------------------------------
9 #-------------------------------------------------------------------------------
10 # Copyright (C) 2008 The IPython Development Team
10 # Copyright (C) 2008 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15
15
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17 # Imports
17 # Imports
18 #-------------------------------------------------------------------------------
18 #-------------------------------------------------------------------------------
19
19
20 import cPickle as pickle
20 import cPickle as pickle
21 from types import FunctionType
21 from types import FunctionType
22
22
23 from zope.interface import Interface, implements
23 from zope.interface import Interface, implements
24 from twisted.internet import defer
24 from twisted.internet import defer
25 from twisted.python import components, failure, log
25 from twisted.python import components, failure, log
26
26
27 from foolscap import Referenceable
27 from foolscap import Referenceable
28
28
29 from IPython.kernel import error
29 from IPython.kernel import error
30 from IPython.kernel.util import printer
30 from IPython.kernel.util import printer
31 from IPython.kernel import map as Map
31 from IPython.kernel import map as Map
32 from IPython.kernel.parallelfunction import ParallelFunction
33 from IPython.kernel.mapper import (
34 MultiEngineMapper,
35 IMultiEngineMapperFactory,
36 IMapper
37 )
32 from IPython.kernel.twistedutil import gatherBoth
38 from IPython.kernel.twistedutil import gatherBoth
33 from IPython.kernel.multiengine import (MultiEngine,
39 from IPython.kernel.multiengine import (MultiEngine,
34 IMultiEngine,
40 IMultiEngine,
35 IFullSynchronousMultiEngine,
41 IFullSynchronousMultiEngine,
36 ISynchronousMultiEngine)
42 ISynchronousMultiEngine)
37 from IPython.kernel.multiengineclient import wrapResultList
43 from IPython.kernel.multiengineclient import wrapResultList
38 from IPython.kernel.pendingdeferred import PendingDeferredManager
44 from IPython.kernel.pendingdeferred import PendingDeferredManager
39 from IPython.kernel.pickleutil import (can, canDict,
45 from IPython.kernel.pickleutil import (can, canDict,
40 canSequence, uncan, uncanDict, uncanSequence)
46 canSequence, uncan, uncanDict, uncanSequence)
41
47
42 from IPython.kernel.clientinterfaces import (
48 from IPython.kernel.clientinterfaces import (
43 IFCClientInterfaceProvider,
49 IFCClientInterfaceProvider,
44 IBlockingClientAdaptor
50 IBlockingClientAdaptor
45 )
51 )
46
52
47 # Needed to access the true globals from __main__.__dict__
53 # Needed to access the true globals from __main__.__dict__
48 import __main__
54 import __main__
49
55
50 #-------------------------------------------------------------------------------
56 #-------------------------------------------------------------------------------
51 # The Controller side of things
57 # The Controller side of things
52 #-------------------------------------------------------------------------------
58 #-------------------------------------------------------------------------------
53
59
54 def packageResult(wrappedMethod):
60 def packageResult(wrappedMethod):
55
61
56 def wrappedPackageResult(self, *args, **kwargs):
62 def wrappedPackageResult(self, *args, **kwargs):
57 d = wrappedMethod(self, *args, **kwargs)
63 d = wrappedMethod(self, *args, **kwargs)
58 d.addCallback(self.packageSuccess)
64 d.addCallback(self.packageSuccess)
59 d.addErrback(self.packageFailure)
65 d.addErrback(self.packageFailure)
60 return d
66 return d
61 return wrappedPackageResult
67 return wrappedPackageResult
62
68
63
69
64 class IFCSynchronousMultiEngine(Interface):
70 class IFCSynchronousMultiEngine(Interface):
65 """Foolscap interface to `ISynchronousMultiEngine`.
71 """Foolscap interface to `ISynchronousMultiEngine`.
66
72
67 The methods in this interface are similar to those of
73 The methods in this interface are similar to those of
68 `ISynchronousMultiEngine`, but their arguments and return values are pickled
74 `ISynchronousMultiEngine`, but their arguments and return values are pickled
69 if they are not already simple Python types that can be sent over XML-RPC.
75 if they are not already simple Python types that can be sent over XML-RPC.
70
76
71 See the documentation of `ISynchronousMultiEngine` and `IMultiEngine` for
77 See the documentation of `ISynchronousMultiEngine` and `IMultiEngine` for
72 documentation about the methods.
78 documentation about the methods.
73
79
74 Most methods in this interface act like the `ISynchronousMultiEngine`
80 Most methods in this interface act like the `ISynchronousMultiEngine`
75 versions and can be called in blocking or non-blocking mode.
81 versions and can be called in blocking or non-blocking mode.
76 """
82 """
77 pass
83 pass
78
84
79
85
80 class FCSynchronousMultiEngineFromMultiEngine(Referenceable):
86 class FCSynchronousMultiEngineFromMultiEngine(Referenceable):
81 """Adapt `IMultiEngine` -> `ISynchronousMultiEngine` -> `IFCSynchronousMultiEngine`.
87 """Adapt `IMultiEngine` -> `ISynchronousMultiEngine` -> `IFCSynchronousMultiEngine`.
82 """
88 """
83
89
84 implements(IFCSynchronousMultiEngine, IFCClientInterfaceProvider)
90 implements(IFCSynchronousMultiEngine, IFCClientInterfaceProvider)
85
91
86 addSlash = True
92 addSlash = True
87
93
88 def __init__(self, multiengine):
94 def __init__(self, multiengine):
89 # Adapt the raw multiengine to `ISynchronousMultiEngine` before saving
95 # Adapt the raw multiengine to `ISynchronousMultiEngine` before saving
90 # it. This allows this class to do two adaptation steps.
96 # it. This allows this class to do two adaptation steps.
91 self.smultiengine = ISynchronousMultiEngine(multiengine)
97 self.smultiengine = ISynchronousMultiEngine(multiengine)
92 self._deferredIDCallbacks = {}
98 self._deferredIDCallbacks = {}
93
99
94 #---------------------------------------------------------------------------
100 #---------------------------------------------------------------------------
95 # Non interface methods
101 # Non interface methods
96 #---------------------------------------------------------------------------
102 #---------------------------------------------------------------------------
97
103
98 def packageFailure(self, f):
104 def packageFailure(self, f):
99 f.cleanFailure()
105 f.cleanFailure()
100 return self.packageSuccess(f)
106 return self.packageSuccess(f)
101
107
102 def packageSuccess(self, obj):
108 def packageSuccess(self, obj):
103 serial = pickle.dumps(obj, 2)
109 serial = pickle.dumps(obj, 2)
104 return serial
110 return serial
105
111
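# Illustrative sketch of the wire format used by packageSuccess/packageFailure
# (hypothetical values): every result, including a cleaned Failure, is sent as
# a pickle (protocol 2) and unpickled on the client side by `unpackage`:
#
#     serial = self.packageSuccess({'id': 0})   # -> pickle.dumps(obj, 2)
#     obj = pickle.loads(serial)                # what the client's unpackage does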
106 #---------------------------------------------------------------------------
112 #---------------------------------------------------------------------------
107 # Things related to PendingDeferredManager
113 # Things related to PendingDeferredManager
108 #---------------------------------------------------------------------------
114 #---------------------------------------------------------------------------
109
115
110 @packageResult
116 @packageResult
111 def remote_get_pending_deferred(self, deferredID, block):
117 def remote_get_pending_deferred(self, deferredID, block):
112 d = self.smultiengine.get_pending_deferred(deferredID, block)
118 d = self.smultiengine.get_pending_deferred(deferredID, block)
113 try:
119 try:
114 callback = self._deferredIDCallbacks.pop(deferredID)
120 callback = self._deferredIDCallbacks.pop(deferredID)
115 except KeyError:
121 except KeyError:
116 callback = None
122 callback = None
117 if callback is not None:
123 if callback is not None:
118 d.addCallback(callback[0], *callback[1], **callback[2])
124 d.addCallback(callback[0], *callback[1], **callback[2])
119 return d
125 return d
120
126
121 @packageResult
127 @packageResult
122 def remote_clear_pending_deferreds(self):
128 def remote_clear_pending_deferreds(self):
123 return defer.maybeDeferred(self.smultiengine.clear_pending_deferreds)
129 return defer.maybeDeferred(self.smultiengine.clear_pending_deferreds)
124
130
125 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
131 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
126 self._deferredIDCallbacks[did] = (callback, args, kwargs)
132 self._deferredIDCallbacks[did] = (callback, args, kwargs)
127 return did
133 return did
128
134
129 #---------------------------------------------------------------------------
135 #---------------------------------------------------------------------------
130 # IEngineMultiplexer related methods
136 # IEngineMultiplexer related methods
131 #---------------------------------------------------------------------------
137 #---------------------------------------------------------------------------
132
138
133 @packageResult
139 @packageResult
134 def remote_execute(self, lines, targets, block):
140 def remote_execute(self, lines, targets, block):
135 return self.smultiengine.execute(lines, targets=targets, block=block)
141 return self.smultiengine.execute(lines, targets=targets, block=block)
136
142
137 @packageResult
143 @packageResult
138 def remote_push(self, binaryNS, targets, block):
144 def remote_push(self, binaryNS, targets, block):
139 try:
145 try:
140 namespace = pickle.loads(binaryNS)
146 namespace = pickle.loads(binaryNS)
141 except:
147 except:
142 d = defer.fail(failure.Failure())
148 d = defer.fail(failure.Failure())
143 else:
149 else:
144 d = self.smultiengine.push(namespace, targets=targets, block=block)
150 d = self.smultiengine.push(namespace, targets=targets, block=block)
145 return d
151 return d
146
152
147 @packageResult
153 @packageResult
148 def remote_pull(self, keys, targets, block):
154 def remote_pull(self, keys, targets, block):
149 d = self.smultiengine.pull(keys, targets=targets, block=block)
155 d = self.smultiengine.pull(keys, targets=targets, block=block)
150 return d
156 return d
151
157
152 @packageResult
158 @packageResult
153 def remote_push_function(self, binaryNS, targets, block):
159 def remote_push_function(self, binaryNS, targets, block):
154 try:
160 try:
155 namespace = pickle.loads(binaryNS)
161 namespace = pickle.loads(binaryNS)
156 except:
162 except:
157 d = defer.fail(failure.Failure())
163 d = defer.fail(failure.Failure())
158 else:
164 else:
159 namespace = uncanDict(namespace)
165 namespace = uncanDict(namespace)
160 d = self.smultiengine.push_function(namespace, targets=targets, block=block)
166 d = self.smultiengine.push_function(namespace, targets=targets, block=block)
161 return d
167 return d
162
168
163 def _canMultipleKeys(self, result):
169 def _canMultipleKeys(self, result):
164 return [canSequence(r) for r in result]
170 return [canSequence(r) for r in result]
165
171
166 @packageResult
172 @packageResult
167 def remote_pull_function(self, keys, targets, block):
173 def remote_pull_function(self, keys, targets, block):
168 def can_functions(r, keys):
174 def can_functions(r, keys):
169 if len(keys)==1 or isinstance(keys, str):
175 if len(keys)==1 or isinstance(keys, str):
170 result = canSequence(r)
176 result = canSequence(r)
171 elif len(keys)>1:
177 elif len(keys)>1:
172 result = [canSequence(s) for s in r]
178 result = [canSequence(s) for s in r]
173 return result
179 return result
174 d = self.smultiengine.pull_function(keys, targets=targets, block=block)
180 d = self.smultiengine.pull_function(keys, targets=targets, block=block)
175 if block:
181 if block:
176 d.addCallback(can_functions, keys)
182 d.addCallback(can_functions, keys)
177 else:
183 else:
178 d.addCallback(lambda did: self._addDeferredIDCallback(did, can_functions, keys))
184 d.addCallback(lambda did: self._addDeferredIDCallback(did, can_functions, keys))
179 return d
185 return d
180
186
181 @packageResult
187 @packageResult
182 def remote_push_serialized(self, binaryNS, targets, block):
188 def remote_push_serialized(self, binaryNS, targets, block):
183 try:
189 try:
184 namespace = pickle.loads(binaryNS)
190 namespace = pickle.loads(binaryNS)
185 except:
191 except:
186 d = defer.fail(failure.Failure())
192 d = defer.fail(failure.Failure())
187 else:
193 else:
188 d = self.smultiengine.push_serialized(namespace, targets=targets, block=block)
194 d = self.smultiengine.push_serialized(namespace, targets=targets, block=block)
189 return d
195 return d
190
196
191 @packageResult
197 @packageResult
192 def remote_pull_serialized(self, keys, targets, block):
198 def remote_pull_serialized(self, keys, targets, block):
193 d = self.smultiengine.pull_serialized(keys, targets=targets, block=block)
199 d = self.smultiengine.pull_serialized(keys, targets=targets, block=block)
194 return d
200 return d
195
201
196 @packageResult
202 @packageResult
197 def remote_get_result(self, i, targets, block):
203 def remote_get_result(self, i, targets, block):
198 if i == 'None':
204 if i == 'None':
199 i = None
205 i = None
200 return self.smultiengine.get_result(i, targets=targets, block=block)
206 return self.smultiengine.get_result(i, targets=targets, block=block)
201
207
202 @packageResult
208 @packageResult
203 def remote_reset(self, targets, block):
209 def remote_reset(self, targets, block):
204 return self.smultiengine.reset(targets=targets, block=block)
210 return self.smultiengine.reset(targets=targets, block=block)
205
211
206 @packageResult
212 @packageResult
207 def remote_keys(self, targets, block):
213 def remote_keys(self, targets, block):
208 return self.smultiengine.keys(targets=targets, block=block)
214 return self.smultiengine.keys(targets=targets, block=block)
209
215
210 @packageResult
216 @packageResult
211 def remote_kill(self, controller, targets, block):
217 def remote_kill(self, controller, targets, block):
212 return self.smultiengine.kill(controller, targets=targets, block=block)
218 return self.smultiengine.kill(controller, targets=targets, block=block)
213
219
214 @packageResult
220 @packageResult
215 def remote_clear_queue(self, targets, block):
221 def remote_clear_queue(self, targets, block):
216 return self.smultiengine.clear_queue(targets=targets, block=block)
222 return self.smultiengine.clear_queue(targets=targets, block=block)
217
223
218 @packageResult
224 @packageResult
219 def remote_queue_status(self, targets, block):
225 def remote_queue_status(self, targets, block):
220 return self.smultiengine.queue_status(targets=targets, block=block)
226 return self.smultiengine.queue_status(targets=targets, block=block)
221
227
222 @packageResult
228 @packageResult
223 def remote_set_properties(self, binaryNS, targets, block):
229 def remote_set_properties(self, binaryNS, targets, block):
224 try:
230 try:
225 ns = pickle.loads(binaryNS)
231 ns = pickle.loads(binaryNS)
226 except:
232 except:
227 d = defer.fail(failure.Failure())
233 d = defer.fail(failure.Failure())
228 else:
234 else:
229 d = self.smultiengine.set_properties(ns, targets=targets, block=block)
235 d = self.smultiengine.set_properties(ns, targets=targets, block=block)
230 return d
236 return d
231
237
232 @packageResult
238 @packageResult
233 def remote_get_properties(self, keys, targets, block):
239 def remote_get_properties(self, keys, targets, block):
234 if keys=='None':
240 if keys=='None':
235 keys=None
241 keys=None
236 return self.smultiengine.get_properties(keys, targets=targets, block=block)
242 return self.smultiengine.get_properties(keys, targets=targets, block=block)
237
243
238 @packageResult
244 @packageResult
239 def remote_has_properties(self, keys, targets, block):
245 def remote_has_properties(self, keys, targets, block):
240 return self.smultiengine.has_properties(keys, targets=targets, block=block)
246 return self.smultiengine.has_properties(keys, targets=targets, block=block)
241
247
242 @packageResult
248 @packageResult
243 def remote_del_properties(self, keys, targets, block):
249 def remote_del_properties(self, keys, targets, block):
244 return self.smultiengine.del_properties(keys, targets=targets, block=block)
250 return self.smultiengine.del_properties(keys, targets=targets, block=block)
245
251
246 @packageResult
252 @packageResult
247 def remote_clear_properties(self, targets, block):
253 def remote_clear_properties(self, targets, block):
248 return self.smultiengine.clear_properties(targets=targets, block=block)
254 return self.smultiengine.clear_properties(targets=targets, block=block)
249
255
250 #---------------------------------------------------------------------------
256 #---------------------------------------------------------------------------
251 # IMultiEngine related methods
257 # IMultiEngine related methods
252 #---------------------------------------------------------------------------
258 #---------------------------------------------------------------------------
253
259
254 def remote_get_ids(self):
260 def remote_get_ids(self):
255 """Get the ids of the registered engines.
261 """Get the ids of the registered engines.
256
262
257 This method always blocks.
263 This method always blocks.
258 """
264 """
259 return self.smultiengine.get_ids()
265 return self.smultiengine.get_ids()
260
266
261 #---------------------------------------------------------------------------
267 #---------------------------------------------------------------------------
262 # IFCClientInterfaceProvider related methods
268 # IFCClientInterfaceProvider related methods
263 #---------------------------------------------------------------------------
269 #---------------------------------------------------------------------------
264
270
265 def remote_get_client_name(self):
271 def remote_get_client_name(self):
266 return 'IPython.kernel.multienginefc.FCFullSynchronousMultiEngineClient'
272 return 'IPython.kernel.multienginefc.FCFullSynchronousMultiEngineClient'
267
273
268
274
269 # The __init__ method of `FCSynchronousMultiEngineFromMultiEngine` first adapts the
275 # The __init__ method of `FCSynchronousMultiEngineFromMultiEngine` first adapts the
270 # `IMultiEngine` to `ISynchronousMultiEngine` so this is actually doing a
276 # `IMultiEngine` to `ISynchronousMultiEngine` so this is actually doing a
271 # two phase adaptation.
277 # two phase adaptation.
272 components.registerAdapter(FCSynchronousMultiEngineFromMultiEngine,
278 components.registerAdapter(FCSynchronousMultiEngineFromMultiEngine,
273 IMultiEngine, IFCSynchronousMultiEngine)
279 IMultiEngine, IFCSynchronousMultiEngine)
274
280
275
281
276 #-------------------------------------------------------------------------------
282 #-------------------------------------------------------------------------------
277 # The Client side of things
283 # The Client side of things
278 #-------------------------------------------------------------------------------
284 #-------------------------------------------------------------------------------
279
285
280
286
281 class FCFullSynchronousMultiEngineClient(object):
287 class FCFullSynchronousMultiEngineClient(object):
282
288
283 implements(IFullSynchronousMultiEngine, IBlockingClientAdaptor)
289 implements(
290 IFullSynchronousMultiEngine,
291 IBlockingClientAdaptor,
292 IMultiEngineMapperFactory,
293 IMapper
294 )
284
295
285 def __init__(self, remote_reference):
296 def __init__(self, remote_reference):
286 self.remote_reference = remote_reference
297 self.remote_reference = remote_reference
287 self._deferredIDCallbacks = {}
298 self._deferredIDCallbacks = {}
288 # This class manages some pending deferreds through this instance. This
299 # This class manages some pending deferreds through this instance. This
289 # is required for methods like gather/scatter as it enables us to
300 # is required for methods like gather/scatter as it enables us to
290 # create our own pending deferreds for composite operations.
301 # create our own pending deferreds for composite operations.
291 self.pdm = PendingDeferredManager()
302 self.pdm = PendingDeferredManager()
292
303
293 #---------------------------------------------------------------------------
304 #---------------------------------------------------------------------------
294 # Non interface methods
305 # Non interface methods
295 #---------------------------------------------------------------------------
306 #---------------------------------------------------------------------------
296
307
297 def unpackage(self, r):
308 def unpackage(self, r):
298 return pickle.loads(r)
309 return pickle.loads(r)
299
310
300 #---------------------------------------------------------------------------
311 #---------------------------------------------------------------------------
301 # Things related to PendingDeferredManager
312 # Things related to PendingDeferredManager
302 #---------------------------------------------------------------------------
313 #---------------------------------------------------------------------------
303
314
304 def get_pending_deferred(self, deferredID, block=True):
315 def get_pending_deferred(self, deferredID, block=True):
305
316
306 # Because we are managing some pending deferreds locally (through
317 # Because we are managing some pending deferreds locally (through
307 # self.pdm) and some remotely (on the controller), we first try the
318 # self.pdm) and some remotely (on the controller), we first try the
308 # local one and then the remote one.
319 # local one and then the remote one.
309 if self.pdm.quick_has_id(deferredID):
320 if self.pdm.quick_has_id(deferredID):
310 d = self.pdm.get_pending_deferred(deferredID, block)
321 d = self.pdm.get_pending_deferred(deferredID, block)
311 return d
322 return d
312 else:
323 else:
313 d = self.remote_reference.callRemote('get_pending_deferred', deferredID, block)
324 d = self.remote_reference.callRemote('get_pending_deferred', deferredID, block)
314 d.addCallback(self.unpackage)
325 d.addCallback(self.unpackage)
315 try:
326 try:
316 callback = self._deferredIDCallbacks.pop(deferredID)
327 callback = self._deferredIDCallbacks.pop(deferredID)
317 except KeyError:
328 except KeyError:
318 callback = None
329 callback = None
319 if callback is not None:
330 if callback is not None:
320 d.addCallback(callback[0], *callback[1], **callback[2])
331 d.addCallback(callback[0], *callback[1], **callback[2])
321 return d
332 return d
322
333
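The lookup order above matters: composite calls such as scatter and gather mint deferred ids in the local PendingDeferredManager, while plain calls return ids minted by the controller, so the local check has to come first. A minimal usage sketch, assuming `client` is an already-connected instance of this class (the variable name is hypothetical):

def show(results):
    print results

# Run code without blocking; the returned deferred fires with a deferred_id.
d = client.execute('x = 2 + 2', targets='all', block=False)
# Trade the deferred_id for the real result; get_pending_deferred checks the
# local PendingDeferredManager first and falls back to the controller.
d.addCallback(lambda did: client.get_pending_deferred(did, block=True))
d.addCallback(show)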
323 def clear_pending_deferreds(self):
334 def clear_pending_deferreds(self):
324
335
325 # This clears both the local (self.pdm) and remote pending deferreds
336 # This clears both the local (self.pdm) and remote pending deferreds
326 self.pdm.clear_pending_deferreds()
337 self.pdm.clear_pending_deferreds()
327 d2 = self.remote_reference.callRemote('clear_pending_deferreds')
338 d2 = self.remote_reference.callRemote('clear_pending_deferreds')
328 d2.addCallback(self.unpackage)
339 d2.addCallback(self.unpackage)
329 return d2
340 return d2
330
341
331 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
342 def _addDeferredIDCallback(self, did, callback, *args, **kwargs):
332 self._deferredIDCallbacks[did] = (callback, args, kwargs)
343 self._deferredIDCallbacks[did] = (callback, args, kwargs)
333 return did
344 return did
334
345
335 #---------------------------------------------------------------------------
346 #---------------------------------------------------------------------------
336 # IEngineMultiplexer related methods
347 # IEngineMultiplexer related methods
337 #---------------------------------------------------------------------------
348 #---------------------------------------------------------------------------
338
349
339 def execute(self, lines, targets='all', block=True):
350 def execute(self, lines, targets='all', block=True):
340 d = self.remote_reference.callRemote('execute', lines, targets, block)
351 d = self.remote_reference.callRemote('execute', lines, targets, block)
341 d.addCallback(self.unpackage)
352 d.addCallback(self.unpackage)
342 return d
353 return d
343
354
344 def push(self, namespace, targets='all', block=True):
355 def push(self, namespace, targets='all', block=True):
345 serial = pickle.dumps(namespace, 2)
356 serial = pickle.dumps(namespace, 2)
346 d = self.remote_reference.callRemote('push', serial, targets, block)
357 d = self.remote_reference.callRemote('push', serial, targets, block)
347 d.addCallback(self.unpackage)
358 d.addCallback(self.unpackage)
348 return d
359 return d
349
360
350 def pull(self, keys, targets='all', block=True):
361 def pull(self, keys, targets='all', block=True):
351 d = self.remote_reference.callRemote('pull', keys, targets, block)
362 d = self.remote_reference.callRemote('pull', keys, targets, block)
352 d.addCallback(self.unpackage)
363 d.addCallback(self.unpackage)
353 return d
364 return d
354
365
355 def push_function(self, namespace, targets='all', block=True):
366 def push_function(self, namespace, targets='all', block=True):
356 cannedNamespace = canDict(namespace)
367 cannedNamespace = canDict(namespace)
357 serial = pickle.dumps(cannedNamespace, 2)
368 serial = pickle.dumps(cannedNamespace, 2)
358 d = self.remote_reference.callRemote('push_function', serial, targets, block)
369 d = self.remote_reference.callRemote('push_function', serial, targets, block)
359 d.addCallback(self.unpackage)
370 d.addCallback(self.unpackage)
360 return d
371 return d
361
372
362 def pull_function(self, keys, targets='all', block=True):
373 def pull_function(self, keys, targets='all', block=True):
363 def uncan_functions(r, keys):
374 def uncan_functions(r, keys):
364 if len(keys)==1 or isinstance(keys, str):
375 if len(keys)==1 or isinstance(keys, str):
365 return uncanSequence(r)
376 return uncanSequence(r)
366 elif len(keys)>1:
377 elif len(keys)>1:
367 return [uncanSequence(s) for s in r]
378 return [uncanSequence(s) for s in r]
368 d = self.remote_reference.callRemote('pull_function', keys, targets, block)
379 d = self.remote_reference.callRemote('pull_function', keys, targets, block)
369 if block:
380 if block:
370 d.addCallback(self.unpackage)
381 d.addCallback(self.unpackage)
371 d.addCallback(uncan_functions, keys)
382 d.addCallback(uncan_functions, keys)
372 else:
383 else:
373 d.addCallback(self.unpackage)
384 d.addCallback(self.unpackage)
374 d.addCallback(lambda did: self._addDeferredIDCallback(did, uncan_functions, keys))
385 d.addCallback(lambda did: self._addDeferredIDCallback(did, uncan_functions, keys))
375 return d
386 return d
376
387
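Functions cannot be pickled directly, so push_function and pull_function "can" them into a transportable form and uncan them on the way back; in the non-blocking case the uncanning step is attached later through _addDeferredIDCallback. A hedged round-trip sketch, again assuming a connected `client`:

def square(x):
    return x * x

# Ship the function to every engine, then pull it back out of their
# namespaces; canning and uncanning happen transparently in both directions.
d = client.push_function(dict(square=square), targets='all', block=True)
d.addCallback(lambda _: client.pull_function('square', targets='all', block=True))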
377 def push_serialized(self, namespace, targets='all', block=True):
388 def push_serialized(self, namespace, targets='all', block=True):
378 cannedNamespace = canDict(namespace)
389 cannedNamespace = canDict(namespace)
379 serial = pickle.dumps(cannedNamespace, 2)
390 serial = pickle.dumps(cannedNamespace, 2)
380 d = self.remote_reference.callRemote('push_serialized', serial, targets, block)
391 d = self.remote_reference.callRemote('push_serialized', serial, targets, block)
381 d.addCallback(self.unpackage)
392 d.addCallback(self.unpackage)
382 return d
393 return d
383
394
384 def pull_serialized(self, keys, targets='all', block=True):
395 def pull_serialized(self, keys, targets='all', block=True):
385 d = self.remote_reference.callRemote('pull_serialized', keys, targets, block)
396 d = self.remote_reference.callRemote('pull_serialized', keys, targets, block)
386 d.addCallback(self.unpackage)
397 d.addCallback(self.unpackage)
387 return d
398 return d
388
399
389 def get_result(self, i=None, targets='all', block=True):
400 def get_result(self, i=None, targets='all', block=True):
390 if i is None: # This is because None cannot be marshalled by xml-rpc
401 if i is None: # This is because None cannot be marshalled by xml-rpc
391 i = 'None'
402 i = 'None'
392 d = self.remote_reference.callRemote('get_result', i, targets, block)
403 d = self.remote_reference.callRemote('get_result', i, targets, block)
393 d.addCallback(self.unpackage)
404 d.addCallback(self.unpackage)
394 return d
405 return d
395
406
396 def reset(self, targets='all', block=True):
407 def reset(self, targets='all', block=True):
397 d = self.remote_reference.callRemote('reset', targets, block)
408 d = self.remote_reference.callRemote('reset', targets, block)
398 d.addCallback(self.unpackage)
409 d.addCallback(self.unpackage)
399 return d
410 return d
400
411
401 def keys(self, targets='all', block=True):
412 def keys(self, targets='all', block=True):
402 d = self.remote_reference.callRemote('keys', targets, block)
413 d = self.remote_reference.callRemote('keys', targets, block)
403 d.addCallback(self.unpackage)
414 d.addCallback(self.unpackage)
404 return d
415 return d
405
416
406 def kill(self, controller=False, targets='all', block=True):
417 def kill(self, controller=False, targets='all', block=True):
407 d = self.remote_reference.callRemote('kill', controller, targets, block)
418 d = self.remote_reference.callRemote('kill', controller, targets, block)
408 d.addCallback(self.unpackage)
419 d.addCallback(self.unpackage)
409 return d
420 return d
410
421
411 def clear_queue(self, targets='all', block=True):
422 def clear_queue(self, targets='all', block=True):
412 d = self.remote_reference.callRemote('clear_queue', targets, block)
423 d = self.remote_reference.callRemote('clear_queue', targets, block)
413 d.addCallback(self.unpackage)
424 d.addCallback(self.unpackage)
414 return d
425 return d
415
426
416 def queue_status(self, targets='all', block=True):
427 def queue_status(self, targets='all', block=True):
417 d = self.remote_reference.callRemote('queue_status', targets, block)
428 d = self.remote_reference.callRemote('queue_status', targets, block)
418 d.addCallback(self.unpackage)
429 d.addCallback(self.unpackage)
419 return d
430 return d
420
431
421 def set_properties(self, properties, targets='all', block=True):
432 def set_properties(self, properties, targets='all', block=True):
422 serial = pickle.dumps(properties, 2)
433 serial = pickle.dumps(properties, 2)
423 d = self.remote_reference.callRemote('set_properties', serial, targets, block)
434 d = self.remote_reference.callRemote('set_properties', serial, targets, block)
424 d.addCallback(self.unpackage)
435 d.addCallback(self.unpackage)
425 return d
436 return d
426
437
427 def get_properties(self, keys=None, targets='all', block=True):
438 def get_properties(self, keys=None, targets='all', block=True):
428 if keys==None:
439 if keys==None:
429 keys='None'
440 keys='None'
430 d = self.remote_reference.callRemote('get_properties', keys, targets, block)
441 d = self.remote_reference.callRemote('get_properties', keys, targets, block)
431 d.addCallback(self.unpackage)
442 d.addCallback(self.unpackage)
432 return d
443 return d
433
444
434 def has_properties(self, keys, targets='all', block=True):
445 def has_properties(self, keys, targets='all', block=True):
435 d = self.remote_reference.callRemote('has_properties', keys, targets, block)
446 d = self.remote_reference.callRemote('has_properties', keys, targets, block)
436 d.addCallback(self.unpackage)
447 d.addCallback(self.unpackage)
437 return d
448 return d
438
449
439 def del_properties(self, keys, targets='all', block=True):
450 def del_properties(self, keys, targets='all', block=True):
440 d = self.remote_reference.callRemote('del_properties', keys, targets, block)
451 d = self.remote_reference.callRemote('del_properties', keys, targets, block)
441 d.addCallback(self.unpackage)
452 d.addCallback(self.unpackage)
442 return d
453 return d
443
454
444 def clear_properties(self, targets='all', block=True):
455 def clear_properties(self, targets='all', block=True):
445 d = self.remote_reference.callRemote('clear_properties', targets, block)
456 d = self.remote_reference.callRemote('clear_properties', targets, block)
446 d.addCallback(self.unpackage)
457 d.addCallback(self.unpackage)
447 return d
458 return d
448
459
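Engine properties are per-engine key/value stores managed by the controller; note how `keys=None` is sent as the string 'None' because None itself cannot be marshalled over the wire. A hedged sketch using a blocking client adaptation (`mec` is a hypothetical connected MultiEngineClient):

mec.set_properties(dict(role='worker'), targets=0)
all_props = mec.get_properties(keys=None, targets=0)   # None -> all keys
has_role = mec.has_properties(('role',), targets=0)
mec.del_properties(('role',), targets=0)
mec.clear_properties(targets='all')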
449 #---------------------------------------------------------------------------
460 #---------------------------------------------------------------------------
450 # IMultiEngine related methods
461 # IMultiEngine related methods
451 #---------------------------------------------------------------------------
462 #---------------------------------------------------------------------------
452
463
453 def get_ids(self):
464 def get_ids(self):
454 d = self.remote_reference.callRemote('get_ids')
465 d = self.remote_reference.callRemote('get_ids')
455 return d
466 return d
456
467
457 #---------------------------------------------------------------------------
468 #---------------------------------------------------------------------------
458 # ISynchronousMultiEngineCoordinator related methods
469 # ISynchronousMultiEngineCoordinator related methods
459 #---------------------------------------------------------------------------
470 #---------------------------------------------------------------------------
460
471
461 def _process_targets(self, targets):
472 def _process_targets(self, targets):
462 def create_targets(ids):
473 def create_targets(ids):
463 if isinstance(targets, int):
474 if isinstance(targets, int):
464 engines = [targets]
475 engines = [targets]
465 elif targets=='all':
476 elif targets=='all':
466 engines = ids
477 engines = ids
467 elif isinstance(targets, (list, tuple)):
478 elif isinstance(targets, (list, tuple)):
468 engines = targets
479 engines = targets
469 for t in engines:
480 for t in engines:
470 if not t in ids:
481 if not t in ids:
471 raise error.InvalidEngineID("engine with id %r does not exist"%t)
482 raise error.InvalidEngineID("engine with id %r does not exist"%t)
472 return engines
483 return engines
473
484
474 d = self.get_ids()
485 d = self.get_ids()
475 d.addCallback(create_targets)
486 d.addCallback(create_targets)
476 return d
487 return d
477
488
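_process_targets normalizes the targets argument ('all', a single id, or a sequence of ids) against the ids currently registered with the controller, failing early with InvalidEngineID. The same rule written as a standalone synchronous sketch, rather than the Twisted callback above:

def normalize_targets(targets, ids):
    # Normalize `targets` against the list of known engine ids.
    if isinstance(targets, int):
        engines = [targets]
    elif targets == 'all':
        engines = list(ids)
    elif isinstance(targets, (list, tuple)):
        engines = list(targets)
    else:
        raise TypeError("targets must be an int, 'all', or a sequence of ints")
    for t in engines:
        if t not in ids:
            raise ValueError("engine with id %r does not exist" % t)
    return engines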
478 def scatter(self, key, seq, style='basic', flatten=False, targets='all', block=True):
489 def scatter(self, key, seq, dist='b', flatten=False, targets='all', block=True):
479
490
480 # Note: scatter and gather handle pending deferreds locally through self.pdm.
491 # Note: scatter and gather handle pending deferreds locally through self.pdm.
481 # This enables us to collect a bunch of deferred ids and make a secondary
492 # This enables us to collect a bunch of deferred ids and make a secondary
482 # deferred id that corresponds to the entire group. This logic is extremely
493 # deferred id that corresponds to the entire group. This logic is extremely
483 # difficult to get right though.
494 # difficult to get right though.
484 def do_scatter(engines):
495 def do_scatter(engines):
485 nEngines = len(engines)
496 nEngines = len(engines)
486 mapClass = Map.styles[style]
497 mapClass = Map.dists[dist]
487 mapObject = mapClass()
498 mapObject = mapClass()
488 d_list = []
499 d_list = []
489 # Loop through and push to each engine in non-blocking mode.
500 # Loop through and push to each engine in non-blocking mode.
490 # This returns a set of deferreds to deferred_ids
501 # This returns a set of deferreds to deferred_ids
491 for index, engineid in enumerate(engines):
502 for index, engineid in enumerate(engines):
492 partition = mapObject.getPartition(seq, index, nEngines)
503 partition = mapObject.getPartition(seq, index, nEngines)
493 if flatten and len(partition) == 1:
504 if flatten and len(partition) == 1:
494 d = self.push({key: partition[0]}, targets=engineid, block=False)
505 d = self.push({key: partition[0]}, targets=engineid, block=False)
495 else:
506 else:
496 d = self.push({key: partition}, targets=engineid, block=False)
507 d = self.push({key: partition}, targets=engineid, block=False)
497 d_list.append(d)
508 d_list.append(d)
498 # Collect the deferreds to deferred_ids
509 # Collect the deferreds to deferred_ids
499 d = gatherBoth(d_list,
510 d = gatherBoth(d_list,
500 fireOnOneErrback=0,
511 fireOnOneErrback=0,
501 consumeErrors=1,
512 consumeErrors=1,
502 logErrors=0)
513 logErrors=0)
503 # Now d has a list of deferred_ids or Failures coming
514 # Now d has a list of deferred_ids or Failures coming
504 d.addCallback(error.collect_exceptions, 'scatter')
515 d.addCallback(error.collect_exceptions, 'scatter')
505 def process_did_list(did_list):
516 def process_did_list(did_list):
506 """Turn a list of deferred_ids into a final result or failure."""
517 """Turn a list of deferred_ids into a final result or failure."""
507 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
518 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
508 final_d = gatherBoth(new_d_list,
519 final_d = gatherBoth(new_d_list,
509 fireOnOneErrback=0,
520 fireOnOneErrback=0,
510 consumeErrors=1,
521 consumeErrors=1,
511 logErrors=0)
522 logErrors=0)
512 final_d.addCallback(error.collect_exceptions, 'scatter')
523 final_d.addCallback(error.collect_exceptions, 'scatter')
513 final_d.addCallback(lambda lop: [i[0] for i in lop])
524 final_d.addCallback(lambda lop: [i[0] for i in lop])
514 return final_d
525 return final_d
515 # Now, depending on block, we need to handle the list deferred_ids
526 # Now, depending on block, we need to handle the list deferred_ids
516 # coming down the pipe differently.
527 # coming down the pipe differently.
517 if block:
528 if block:
518 # If we are blocking register a callback that will transform the
529 # If we are blocking register a callback that will transform the
519 # list of deferred_ids into the final result.
530 # list of deferred_ids into the final result.
520 d.addCallback(process_did_list)
531 d.addCallback(process_did_list)
521 return d
532 return d
522 else:
533 else:
523 # Here we are going to use a _local_ PendingDeferredManager.
534 # Here we are going to use a _local_ PendingDeferredManager.
524 deferred_id = self.pdm.get_deferred_id()
535 deferred_id = self.pdm.get_deferred_id()
525 # This is the deferred we will return to the user that will fire
536 # This is the deferred we will return to the user that will fire
526 # with the local deferred_id AFTER we have received the list of
537 # with the local deferred_id AFTER we have received the list of
527 # primary deferred_ids
538 # primary deferred_ids
528 d_to_return = defer.Deferred()
539 d_to_return = defer.Deferred()
529 def do_it(did_list):
540 def do_it(did_list):
530 """Produce a deferred to the final result, but first fire the
541 """Produce a deferred to the final result, but first fire the
531 deferred we will return to the user that has the local
542 deferred we will return to the user that has the local
532 deferred id."""
543 deferred id."""
533 d_to_return.callback(deferred_id)
544 d_to_return.callback(deferred_id)
534 return process_did_list(did_list)
545 return process_did_list(did_list)
535 d.addCallback(do_it)
546 d.addCallback(do_it)
536 # Now save the deferred to the final result
547 # Now save the deferred to the final result
537 self.pdm.save_pending_deferred(d, deferred_id)
548 self.pdm.save_pending_deferred(d, deferred_id)
538 return d_to_return
549 return d_to_return
539
550
540 d = self._process_targets(targets)
551 d = self._process_targets(targets)
541 d.addCallback(do_scatter)
552 d.addCallback(do_scatter)
542 return d
553 return d
543
554
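Put together, scatter/execute/pull (or gather) gives the classic data-parallel pattern. A hedged sketch with the blocking client adaptation (`mec` is hypothetical, and all registered engines are assumed to participate):

mec.scatter('chunk', range(100))          # block-distribute 100 items by key
mec.execute('partial = sum(chunk)')       # each engine reduces its own block
print sum(mec.pull('partial'))            # combine per-engine partial sums: 4950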
544 def gather(self, key, style='basic', targets='all', block=True):
555 def gather(self, key, dist='b', targets='all', block=True):
545
556
546 # Note: scatter and gather handle pending deferreds locally through self.pdm.
557 # Note: scatter and gather handle pending deferreds locally through self.pdm.
558 # This enables us to collect a bunch of deferred ids and make a secondary
569 # This enables us to collect a bunch of deferred ids and make a secondary
548 # deferred id that corresponds to the entire group. This logic is extremely
559 # deferred id that corresponds to the entire group. This logic is extremely
549 # difficult to get right though.
560 # difficult to get right though.
550 def do_gather(engines):
561 def do_gather(engines):
551 nEngines = len(engines)
562 nEngines = len(engines)
552 mapClass = Map.styles[style]
563 mapClass = Map.dists[dist]
553 mapObject = mapClass()
564 mapObject = mapClass()
554 d_list = []
565 d_list = []
555 # Loop through and pull from each engine in non-blocking mode.
566 # Loop through and pull from each engine in non-blocking mode.
556 # This returns a set of deferreds to deferred_ids
567 # This returns a set of deferreds to deferred_ids
557 for index, engineid in enumerate(engines):
568 for index, engineid in enumerate(engines):
558 d = self.pull(key, targets=engineid, block=False)
569 d = self.pull(key, targets=engineid, block=False)
559 d_list.append(d)
570 d_list.append(d)
560 # Collect the deferreds to deferred_ids
571 # Collect the deferreds to deferred_ids
561 d = gatherBoth(d_list,
572 d = gatherBoth(d_list,
562 fireOnOneErrback=0,
573 fireOnOneErrback=0,
563 consumeErrors=1,
574 consumeErrors=1,
564 logErrors=0)
575 logErrors=0)
565 # Now d has a list of deferred_ids or Failures coming
576 # Now d has a list of deferred_ids or Failures coming
566 d.addCallback(error.collect_exceptions, 'gather')
577 d.addCallback(error.collect_exceptions, 'gather')
567 def process_did_list(did_list):
578 def process_did_list(did_list):
568 """Turn a list of deferred_ids into a final result or failure."""
579 """Turn a list of deferred_ids into a final result or failure."""
569 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
580 new_d_list = [self.get_pending_deferred(did, True) for did in did_list]
570 final_d = gatherBoth(new_d_list,
581 final_d = gatherBoth(new_d_list,
571 fireOnOneErrback=0,
582 fireOnOneErrback=0,
572 consumeErrors=1,
583 consumeErrors=1,
573 logErrors=0)
584 logErrors=0)
574 final_d.addCallback(error.collect_exceptions, 'gather')
585 final_d.addCallback(error.collect_exceptions, 'gather')
575 final_d.addCallback(lambda lop: [i[0] for i in lop])
586 final_d.addCallback(lambda lop: [i[0] for i in lop])
576 final_d.addCallback(mapObject.joinPartitions)
587 final_d.addCallback(mapObject.joinPartitions)
577 return final_d
588 return final_d
578 # Now, depending on block, we need to handle the list deferred_ids
589 # Now, depending on block, we need to handle the list deferred_ids
579 # coming down the pipe differently.
590 # coming down the pipe differently.
580 if block:
591 if block:
581 # If we are blocking register a callback that will transform the
592 # If we are blocking register a callback that will transform the
582 # list of deferred_ids into the final result.
593 # list of deferred_ids into the final result.
583 d.addCallback(process_did_list)
594 d.addCallback(process_did_list)
584 return d
595 return d
585 else:
596 else:
586 # Here we are going to use a _local_ PendingDeferredManager.
597 # Here we are going to use a _local_ PendingDeferredManager.
587 deferred_id = self.pdm.get_deferred_id()
598 deferred_id = self.pdm.get_deferred_id()
588 # This is the deferred we will return to the user that will fire
599 # This is the deferred we will return to the user that will fire
589 # with the local deferred_id AFTER we have received the list of
600 # with the local deferred_id AFTER we have received the list of
590 # primary deferred_ids
601 # primary deferred_ids
591 d_to_return = defer.Deferred()
602 d_to_return = defer.Deferred()
592 def do_it(did_list):
603 def do_it(did_list):
593 """Produce a deferred to the final result, but first fire the
604 """Produce a deferred to the final result, but first fire the
594 deferred we will return to the user that has the local
605 deferred we will return to the user that has the local
595 deferred id."""
606 deferred id."""
596 d_to_return.callback(deferred_id)
607 d_to_return.callback(deferred_id)
597 return process_did_list(did_list)
608 return process_did_list(did_list)
598 d.addCallback(do_it)
609 d.addCallback(do_it)
599 # Now save the deferred to the final result
610 # Now save the deferred to the final result
600 self.pdm.save_pending_deferred(d, deferred_id)
611 self.pdm.save_pending_deferred(d, deferred_id)
601 return d_to_return
612 return d_to_return
602
613
603 d = self._process_targets(targets)
614 d = self._process_targets(targets)
604 d.addCallback(do_gather)
615 d.addCallback(do_gather)
605 return d
616 return d
606
617
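Both scatter and gather delegate the actual splitting and rejoining to a Map object looked up in Map.dists[dist]; only block distribution ('b') is currently supported. A standalone sketch of what block partitioning amounts to (illustration only; the real Map class is defined elsewhere in the kernel package and may differ in detail):

def get_partition(seq, index, n):
    # Split len(seq) items into n nearly equal contiguous blocks and
    # return the block belonging to engine `index`.
    q, r = divmod(len(seq), n)
    starts = [i * q + min(i, r) for i in range(n + 1)]
    return seq[starts[index]:starts[index + 1]]

def join_partitions(parts):
    # Reassemble the blocks in engine order.
    joined = []
    for p in parts:
        joined.extend(p)
    return joined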
607 def map(self, func, seq, style='basic', targets='all', block=True):
618 def raw_map(self, func, sequences, dist='b', targets='all', block=True):
608 d_list = []
619 """
620 A parallelized version of Python's builtin map.
621
622 This has a slightly different syntax than the builtin `map`.
623 This is needed because we need to have keyword arguments and thus
624 can't use *args to capture all the sequences. Instead, they must
625 be passed in a list or tuple.
626
627 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
628
629 Most users will want to use parallel functions or the `mapper`
630 and `map` methods for an API that follows that of the builtin
631 `map`.
632 """
633 if not isinstance(sequences, (list, tuple)):
634 raise TypeError('sequences must be a list or tuple')
635 max_len = max(len(s) for s in sequences)
636 for s in sequences:
637 if len(s)!=max_len:
638 raise ValueError('all sequences must have equal length')
609 if isinstance(func, FunctionType):
639 if isinstance(func, FunctionType):
610 d = self.push_function(dict(_ipython_map_func=func), targets=targets, block=False)
640 d = self.push_function(dict(_ipython_map_func=func), targets=targets, block=False)
611 d.addCallback(lambda did: self.get_pending_deferred(did, True))
641 d.addCallback(lambda did: self.get_pending_deferred(did, True))
612 sourceToRun = '_ipython_map_seq_result = map(_ipython_map_func, _ipython_map_seq)'
642 sourceToRun = '_ipython_map_seq_result = map(_ipython_map_func, *zip(*_ipython_map_seq))'
613 elif isinstance(func, str):
643 elif isinstance(func, str):
614 d = defer.succeed(None)
644 d = defer.succeed(None)
615 sourceToRun = \
645 sourceToRun = \
616 '_ipython_map_seq_result = map(%s, _ipython_map_seq)' % func
646 '_ipython_map_seq_result = map(%s, *zip(*_ipython_map_seq))' % func
617 else:
647 else:
618 raise TypeError("func must be a function or str")
648 raise TypeError("func must be a function or str")
619
649
620 d.addCallback(lambda _: self.scatter('_ipython_map_seq', seq, style, targets=targets))
650 d.addCallback(lambda _: self.scatter('_ipython_map_seq', zip(*sequences), dist, targets=targets))
621 d.addCallback(lambda _: self.execute(sourceToRun, targets=targets, block=False))
651 d.addCallback(lambda _: self.execute(sourceToRun, targets=targets, block=False))
622 d.addCallback(lambda did: self.get_pending_deferred(did, True))
652 d.addCallback(lambda did: self.get_pending_deferred(did, True))
623 d.addCallback(lambda _: self.gather('_ipython_map_seq_result', style, targets=targets, block=block))
653 d.addCallback(lambda _: self.gather('_ipython_map_seq_result', dist, targets=targets, block=block))
624 return d
654 return d
625
655
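The zip(*sequences) packing is what lets raw_map support several argument sequences with a single scatter key: each engine receives a block of argument tuples and unzips it back into per-argument sequences before calling map. A small worked example of that transformation (Python 2 semantics, where map and zip return lists):

xs, ys = [1, 2, 3, 4], [10, 20, 30, 40]
packed = zip(xs, ys)              # [(1, 10), (2, 20), (3, 30), (4, 40)]
partition = packed[:2]            # the block one engine might receive
result = map(lambda x, y: x + y, *zip(*partition))
print result                      # [11, 22]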
656 def map(self, func, *sequences):
657 """
658 A parallel version of Python's builtin `map` function.
659
660 This method applies a function to sequences of arguments. It
661 follows the same syntax as the builtin `map`.
662
663 This method creates a mapper object by calling `self.mapper` with
664 no arguments and then uses that mapper to do the mapping. See
665 the documentation of `mapper` for more details.
666 """
667 return self.mapper().map(func, *sequences)
668
669 def mapper(self, dist='b', targets='all', block=True):
670 """
671 Create a mapper object that has a `map` method.
672
673 This method returns an object that implements the `IMapper`
674 interface. This method is a factory that is used to control how
675 the map happens.
676
677 :Parameters:
678 dist : str
679 What decomposition to use, 'b' is the only one supported
680 currently
681 targets : str, int, sequence of ints
682 Which engines to use for the map
683 block : boolean
684 Should calls to `map` block or not
685 """
686 return MultiEngineMapper(self, dist, targets, block)
687
688 def parallel(self, dist='b', targets='all', block=True):
689 """
690 A decorator that turns a function into a parallel function.
691
692 This can be used as:
693
694 @parallel()
695 def f(x, y):
696 ...
697
698 f(range(10), range(10))
699
700 This causes f(0,0), f(1,1), ... to be called in parallel.
701
702 :Parameters:
703 dist : str
704 What decomposition to use, 'b' is the only one supported
705 currently
706 targets : str, int, sequence of ints
707 Which engines to use for the map
708 block : boolean
709 Should calls to `map` block or not
710 """
711 mapper = self.mapper(dist, targets, block)
712 pf = ParallelFunction(mapper)
713 return pf
714
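map, mapper and parallel are three views of the same machinery: map uses a default mapper, mapper lets you fix dist/targets/block up front, and parallel wraps a mapper in a decorator. A hedged usage sketch with the blocking client adaptation (`mec` is hypothetical; the function is pushed to the engines before being applied):

def double(x):
    return 2 * x

m = mec.mapper(dist='b', targets='all', block=True)
print m.map(double, range(8))            # [0, 2, 4, ..., 14]

@mec.parallel(dist='b', targets='all', block=True)
def add(x, y):
    return x + y

print add(range(10), range(10))          # add(0,0), add(1,1), ... in parallel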
626 #---------------------------------------------------------------------------
715 #---------------------------------------------------------------------------
627 # ISynchronousMultiEngineExtras related methods
716 # ISynchronousMultiEngineExtras related methods
628 #---------------------------------------------------------------------------
717 #---------------------------------------------------------------------------
629
718
630 def _transformPullResult(self, pushResult, multitargets, lenKeys):
719 def _transformPullResult(self, pushResult, multitargets, lenKeys):
631 if not multitargets:
720 if not multitargets:
632 result = pushResult[0]
721 result = pushResult[0]
633 elif lenKeys > 1:
722 elif lenKeys > 1:
634 result = zip(*pushResult)
723 result = zip(*pushResult)
635 elif lenKeys == 1:
724 elif lenKeys == 1:
636 result = list(pushResult)
725 result = list(pushResult)
637 return result
726 return result
638
727
639 def zip_pull(self, keys, targets='all', block=True):
728 def zip_pull(self, keys, targets='all', block=True):
640 multitargets = not isinstance(targets, int) and len(targets) > 1
729 multitargets = not isinstance(targets, int) and len(targets) > 1
641 lenKeys = len(keys)
730 lenKeys = len(keys)
642 d = self.pull(keys, targets=targets, block=block)
731 d = self.pull(keys, targets=targets, block=block)
643 if block:
732 if block:
644 d.addCallback(self._transformPullResult, multitargets, lenKeys)
733 d.addCallback(self._transformPullResult, multitargets, lenKeys)
645 else:
734 else:
646 d.addCallback(lambda did: self._addDeferredIDCallback(did, self._transformPullResult, multitargets, lenKeys))
735 d.addCallback(lambda did: self._addDeferredIDCallback(did, self._transformPullResult, multitargets, lenKeys))
647 return d
736 return d
648
737
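zip_pull reshapes the result of pull so that, with several keys and several engines, you get one tuple per key instead of one list per engine; that is all _transformPullResult does. A worked example of the reshaping on plain data:

# pull(['a', 'b']) over two engines returns one list per engine:
pushResult = [[1, 2], [10, 20]]
# zip_pull regroups it as one tuple per key: a -> (1, 10), b -> (2, 20)
print zip(*pushResult)      # [(1, 10), (2, 20)]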
649 def run(self, fname, targets='all', block=True):
738 def run(self, fname, targets='all', block=True):
650 fileobj = open(fname,'r')
739 fileobj = open(fname,'r')
651 source = fileobj.read()
740 source = fileobj.read()
652 fileobj.close()
741 fileobj.close()
653 # if the compilation blows, we get a local error right away
742 # if the compilation blows, we get a local error right away
654 try:
743 try:
655 code = compile(source,fname,'exec')
744 code = compile(source,fname,'exec')
656 except:
745 except:
657 return defer.fail(failure.Failure())
746 return defer.fail(failure.Failure())
658 # Now run the code
747 # Now run the code
659 d = self.execute(source, targets=targets, block=block)
748 d = self.execute(source, targets=targets, block=block)
660 return d
749 return d
661
750
662 #---------------------------------------------------------------------------
751 #---------------------------------------------------------------------------
663 # IBlockingClientAdaptor related methods
752 # IBlockingClientAdaptor related methods
664 #---------------------------------------------------------------------------
753 #---------------------------------------------------------------------------
665
754
666 def adapt_to_blocking_client(self):
755 def adapt_to_blocking_client(self):
667 from IPython.kernel.multiengineclient import IFullBlockingMultiEngineClient
756 from IPython.kernel.multiengineclient import IFullBlockingMultiEngineClient
668 return IFullBlockingMultiEngineClient(self)
757 return IFullBlockingMultiEngineClient(self)
@@ -1,32 +1,107 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """A parallelized function that does scatter/execute/gather."""
3 """A parallelized function that does scatter/execute/gather."""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 from types import FunctionType
18 from types import FunctionType
19 from zope.interface import Interface, implements
19
20
20 class ParallelFunction:
21
21 """A function that operates in parallel on sequences."""
22 class IMultiEngineParallelDecorator(Interface):
22 def __init__(self, func, multiengine, targets, block):
23 """A decorator that creates a parallel function."""
23 """Create a `ParallelFunction`.
24
25 def parallel(dist='b', targets=None, block=None):
26 """
27 A decorator that turns a function into a parallel function.
28
29 This can be used as:
30
31 @parallel()
32 def f(x, y):
33 ...
34
35 f(range(10), range(10))
36
37 This causes f(0,0), f(1,1), ... to be called in parallel.
38
39 :Parameters:
40 dist : str
41 What decomposition to use, 'b' is the only one supported
42 currently
43 targets : str, int, sequence of ints
44 Which engines to use for the map
45 block : boolean
46 Should calls to `map` block or not
47 """
48
49 class ITaskParallelDecorator(Interface):
50 """A decorator that creates a parallel function."""
51
52 def parallel(clear_before=False, clear_after=False, retries=0,
53 recovery_task=None, depend=None, block=True):
54 """
55 A decorator that turns a function into a parallel function.
56
57 This can be used as:
58
59 @parallel()
60 def f(x, y):
61 ...
62
63 f(range(10), range(10))
64
65 This causes f(0,0), f(1,1), ... to be called in parallel.
66
67 See the documentation for `IPython.kernel.task.BaseTask` for
68 documentation on the arguments to this method.
69 """
70
71 class IParallelFunction(Interface):
72 pass
73
74 class ParallelFunction(object):
75 """
76 The implementation of a parallel function.
77
78 A parallel function is similar to Python's map function:
79
80 map(func, *sequences) -> pfunc(*sequences)
81
82 Parallel functions should be created by using the @parallel decorator.
83 """
84
85 implements(IParallelFunction)
86
87 def __init__(self, mapper):
88 """
89 Create a parallel function from an `IMapper`.
90
91 :Parameters:
92 mapper : an `IMapper` implementer.
93 The mapper to use for the parallel function
94 """
95 self.mapper = mapper
96
97 def __call__(self, func):
98 """
99 Decorate a function to make it run in parallel.
24 """
100 """
25 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
101 assert isinstance(func, (str, FunctionType)), "func must be a function or str"
26 self.func = func
102 self.func = func
27 self.multiengine = multiengine
28 self.targets = targets
29 self.block = block
103 def call_function(*sequences):
104 return self.mapper.map(self.func, *sequences)
105 return call_function
106
30
107
No newline at end of file
31 def __call__(self, sequence):
32 return self.multiengine.map(self.func, sequence, targets=self.targets, block=self.block) No newline at end of file
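The new ParallelFunction no longer knows anything about engines: it just holds an IMapper and, when used as a decorator, returns a closure that forwards to mapper.map. A self-contained imitation of that shape, using the builtin map as a stand-in mapper (illustration only, not the real classes):

from types import FunctionType

class EchoMapper(object):
    """A stand-in IMapper that just calls the builtin map."""
    def map(self, func, *sequences):
        return map(func, *sequences)

class TinyParallelFunction(object):
    """The shape of the new ParallelFunction: hold a mapper, decorate a func."""
    def __init__(self, mapper):
        self.mapper = mapper
    def __call__(self, func):
        assert isinstance(func, (str, FunctionType)), "func must be a function or str"
        def call_function(*sequences):
            return self.mapper.map(func, *sequences)
        return call_function

@TinyParallelFunction(EchoMapper())
def mul(x, y):
    return x * y

print mul([1, 2, 3], [4, 5, 6])   # prints [4, 10, 18]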
@@ -1,323 +1,324 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 """Start an IPython cluster conveniently, either locally or remotely.
4 """Start an IPython cluster conveniently, either locally or remotely.
5
5
6 Basic usage
6 Basic usage
7 -----------
7 -----------
8
8
9 For local operation, the simplest mode of usage is:
9 For local operation, the simplest mode of usage is:
10
10
11 %prog -n N
11 %prog -n N
12
12
13 where N is the number of engines you want started.
13 where N is the number of engines you want started.
14
14
15 For remote operation, you must call it with a cluster description file:
15 For remote operation, you must call it with a cluster description file:
16
16
17 %prog -f clusterfile.py
17 %prog -f clusterfile.py
18
18
19 The cluster file is a normal Python script which gets run via execfile(). You
19 The cluster file is a normal Python script which gets run via execfile(). You
20 can have arbitrary logic in it, but all that matters is that at the end of the
20 can have arbitrary logic in it, but all that matters is that at the end of the
21 execution, it declares the variables 'controller', 'engines', and optionally
21 execution, it declares the variables 'controller', 'engines', and optionally
22 'sshx'. See the accompanying examples for details on what these variables must
22 'sshx'. See the accompanying examples for details on what these variables must
23 contain.
23 contain.
24
24
25
25
26 Notes
26 Notes
27 -----
27 -----
28
28
29 WARNING: this code is still UNFINISHED and EXPERIMENTAL! It is incomplete,
29 WARNING: this code is still UNFINISHED and EXPERIMENTAL! It is incomplete,
30 some listed options are not really implemented, and all of its interfaces are
30 some listed options are not really implemented, and all of its interfaces are
31 subject to change.
31 subject to change.
32
32
33 When operating over SSH for a remote cluster, this program relies on the
33 When operating over SSH for a remote cluster, this program relies on the
34 existence of a particular script called 'sshx'. This script must live in the
34 existence of a particular script called 'sshx'. This script must live in the
35 target systems where you'll be running your controller and engines, and is
35 target systems where you'll be running your controller and engines, and is
36 needed to configure your PATH and PYTHONPATH variables for further execution of
36 needed to configure your PATH and PYTHONPATH variables for further execution of
37 python code at the other end of an SSH connection. The script can be as simple
37 python code at the other end of an SSH connection. The script can be as simple
38 as:
38 as:
39
39
40 #!/bin/sh
40 #!/bin/sh
41 . $HOME/.bashrc
41 . $HOME/.bashrc
42 "$@"
42 "$@"
43
43
44 which is the default one provided by IPython. You can modify this or provide
44 which is the default one provided by IPython. You can modify this or provide
45 your own. Since it's quite likely that for different clusters you may need
45 your own. Since it's quite likely that for different clusters you may need
46 this script to configure things differently or that it may live in different
46 this script to configure things differently or that it may live in different
47 locations, its full path can be set in the same file where you define the
47 locations, its full path can be set in the same file where you define the
48 cluster setup. IPython's order of evaluation for this variable is the
48 cluster setup. IPython's order of evaluation for this variable is the
49 following:
49 following:
50
50
51 a) Internal default: 'sshx'. This only works if it is in the default system
51 a) Internal default: 'sshx'. This only works if it is in the default system
52 path which SSH sets up in non-interactive mode.
52 path which SSH sets up in non-interactive mode.
53
53
54 b) Environment variable: if $IPYTHON_SSHX is defined, this overrides the
54 b) Environment variable: if $IPYTHON_SSHX is defined, this overrides the
55 internal default.
55 internal default.
56
56
57 c) Variable 'sshx' in the cluster configuration file: finally, this will
57 c) Variable 'sshx' in the cluster configuration file: finally, this will
58 override the previous two values.
58 override the previous two values.
59
59
60 This code is Unix-only, with precious little hope of any of this ever working
60 This code is Unix-only, with precious little hope of any of this ever working
61 under Windows, since we need SSH from the ground up, we background processes,
61 under Windows, since we need SSH from the ground up, we background processes,
62 etc. Ports of this functionality to Windows are welcome.
62 etc. Ports of this functionality to Windows are welcome.
63
63
64
64
65 Call summary
65 Call summary
66 ------------
66 ------------
67
67
68 %prog [options]
68 %prog [options]
69 """
69 """
70
70
71 __docformat__ = "restructuredtext en"
71 __docformat__ = "restructuredtext en"
72
72
73 #-------------------------------------------------------------------------------
73 #-------------------------------------------------------------------------------
74 # Copyright (C) 2008 The IPython Development Team
74 # Copyright (C) 2008 The IPython Development Team
75 #
75 #
76 # Distributed under the terms of the BSD License. The full license is in
76 # Distributed under the terms of the BSD License. The full license is in
77 # the file COPYING, distributed as part of this software.
77 # the file COPYING, distributed as part of this software.
78 #-------------------------------------------------------------------------------
78 #-------------------------------------------------------------------------------
79
79
80 #-------------------------------------------------------------------------------
80 #-------------------------------------------------------------------------------
81 # Stdlib imports
81 # Stdlib imports
82 #-------------------------------------------------------------------------------
82 #-------------------------------------------------------------------------------
83
83
84 import os
84 import os
85 import signal
85 import signal
86 import sys
86 import sys
87 import time
87 import time
88
88
89 from optparse import OptionParser
89 from optparse import OptionParser
90 from subprocess import Popen,call
90 from subprocess import Popen,call
91
91
92 #---------------------------------------------------------------------------
92 #---------------------------------------------------------------------------
93 # IPython imports
93 # IPython imports
94 #---------------------------------------------------------------------------
94 #---------------------------------------------------------------------------
95 from IPython.tools import utils
95 from IPython.tools import utils
96 from IPython.config import cutils
96 from IPython.config import cutils
97
97
98 #---------------------------------------------------------------------------
98 #---------------------------------------------------------------------------
99 # Normal code begins
99 # Normal code begins
100 #---------------------------------------------------------------------------
100 #---------------------------------------------------------------------------
101
101
102 def parse_args():
102 def parse_args():
103 """Parse command line and return opts,args."""
103 """Parse command line and return opts,args."""
104
104
105 parser = OptionParser(usage=__doc__)
105 parser = OptionParser(usage=__doc__)
106 newopt = parser.add_option # shorthand
106 newopt = parser.add_option # shorthand
107
107
108 newopt("--controller-port", type="int", dest="controllerport",
108 newopt("--controller-port", type="int", dest="controllerport",
109 help="the TCP port the controller is listening on")
109 help="the TCP port the controller is listening on")
110
110
111 newopt("--controller-ip", type="string", dest="controllerip",
111 newopt("--controller-ip", type="string", dest="controllerip",
112 help="the TCP ip address of the controller")
112 help="the TCP ip address of the controller")
113
113
114 newopt("-n", "--num", type="int", dest="n",default=2,
114 newopt("-n", "--num", type="int", dest="n",default=2,
115 help="the number of engines to start")
115 help="the number of engines to start")
116
116
117 newopt("--engine-port", type="int", dest="engineport",
117 newopt("--engine-port", type="int", dest="engineport",
118 help="the TCP port the controller will listen on for engine "
118 help="the TCP port the controller will listen on for engine "
119 "connections")
119 "connections")
120
120
121 newopt("--engine-ip", type="string", dest="engineip",
121 newopt("--engine-ip", type="string", dest="engineip",
122 help="the TCP ip address the controller will listen on "
122 help="the TCP ip address the controller will listen on "
123 "for engine connections")
123 "for engine connections")
124
124
125 newopt("--mpi", type="string", dest="mpi",
125 newopt("--mpi", type="string", dest="mpi",
126 help="use mpi with package: for instance --mpi=mpi4py")
126 help="use mpi with package: for instance --mpi=mpi4py")
127
127
128 newopt("-l", "--logfile", type="string", dest="logfile",
128 newopt("-l", "--logfile", type="string", dest="logfile",
129 help="log file name")
129 help="log file name")
130
130
131 newopt('-f','--cluster-file',dest='clusterfile',
131 newopt('-f','--cluster-file',dest='clusterfile',
132 help='file describing a remote cluster')
132 help='file describing a remote cluster')
133
133
134 return parser.parse_args()
134 return parser.parse_args()
135
135
136 def numAlive(controller,engines):
136 def numAlive(controller,engines):
137 """Return the number of processes still alive."""
137 """Return the number of processes still alive."""
138 retcodes = [controller.poll()] + \
138 retcodes = [controller.poll()] + \
139 [e.poll() for e in engines]
139 [e.poll() for e in engines]
140 return retcodes.count(None)
140 return retcodes.count(None)
141
141
142 stop = lambda pid: os.kill(pid,signal.SIGINT)
142 stop = lambda pid: os.kill(pid,signal.SIGINT)
143 kill = lambda pid: os.kill(pid,signal.SIGTERM)
143 kill = lambda pid: os.kill(pid,signal.SIGTERM)
144
144
145 def cleanup(clean,controller,engines):
145 def cleanup(clean,controller,engines):
146 """Stop the controller and engines with the given cleanup method."""
146 """Stop the controller and engines with the given cleanup method."""
147
147
148 for e in engines:
148 for e in engines:
149 if e.poll() is None:
149 if e.poll() is None:
150 print 'Stopping engine, pid',e.pid
150 print 'Stopping engine, pid',e.pid
151 clean(e.pid)
151 clean(e.pid)
152 if controller.poll() is None:
152 if controller.poll() is None:
153 print 'Stopping controller, pid',controller.pid
153 print 'Stopping controller, pid',controller.pid
154 clean(controller.pid)
154 clean(controller.pid)
155
155
156
156
157 def ensureDir(path):
157 def ensureDir(path):
158 """Ensure a directory exists or raise an exception."""
158 """Ensure a directory exists or raise an exception."""
159 if not os.path.isdir(path):
159 if not os.path.isdir(path):
160 os.makedirs(path)
160 os.makedirs(path)
161
161
162
162
163 def startMsg(control_host,control_port=10105):
163 def startMsg(control_host,control_port=10105):
164 """Print a startup message"""
164 """Print a startup message"""
165 print
165 print
166 print 'Your cluster is up and running.'
166 print 'Your cluster is up and running.'
167 print
167 print
168 print 'For interactive use, you can make a MultiEngineClient with:'
168 print 'For interactive use, you can make a MultiEngineClient with:'
169 print
169 print
170 print 'from IPython.kernel import client'
170 print 'from IPython.kernel import client'
171 print "mec = client.MultiEngineClient((%r,%s))" % \
171 print "mec = client.MultiEngineClient()"
172 (control_host,control_port)
173 print
172 print
174 print 'You can then cleanly stop the cluster from IPython using:'
173 print 'You can then cleanly stop the cluster from IPython using:'
175 print
174 print
176 print 'mec.kill(controller=True)'
175 print 'mec.kill(controller=True)'
177 print
176 print
178
177
179
178
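The banner above now constructs MultiEngineClient with no arguments, since this version of the client locates the controller's connection information on its own. The interactive session it suggests, slightly expanded as a hedged sketch:

from IPython.kernel import client

mec = client.MultiEngineClient()
mec.execute('import os; pid = os.getpid()')
print mec.pull('pid')              # one pid per engine
mec.kill(controller=True)          # cleanly stop engines and the controller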
180 def clusterLocal(opt,arg):
179 def clusterLocal(opt,arg):
181 """Start a cluster on the local machine."""
180 """Start a cluster on the local machine."""
182
181
183 # Store all logs inside the ipython directory
182 # Store all logs inside the ipython directory
184 ipdir = cutils.get_ipython_dir()
183 ipdir = cutils.get_ipython_dir()
185 pjoin = os.path.join
184 pjoin = os.path.join
186
185
187 logfile = opt.logfile
186 logfile = opt.logfile
188 if logfile is None:
187 if logfile is None:
189 logdir_base = pjoin(ipdir,'log')
188 logdir_base = pjoin(ipdir,'log')
190 ensureDir(logdir_base)
189 ensureDir(logdir_base)
191 logfile = pjoin(logdir_base,'ipcluster-')
190 logfile = pjoin(logdir_base,'ipcluster-')
192
191
193 print 'Starting controller:',
192 print 'Starting controller:',
194 controller = Popen(['ipcontroller','--logfile',logfile])
193 controller = Popen(['ipcontroller','--logfile',logfile,'-x','-y'])
195 print 'Controller PID:',controller.pid
194 print 'Controller PID:',controller.pid
196
195
197 print 'Starting engines: ',
196 print 'Starting engines: ',
198 time.sleep(3)
197 time.sleep(5)
199
198
200 englogfile = '%s%s-' % (logfile,controller.pid)
199 englogfile = '%s%s-' % (logfile,controller.pid)
201 mpi = opt.mpi
200 mpi = opt.mpi
202 if mpi: # start with mpi - killing the engines with sigterm will not work if you do this
201 if mpi: # start with mpi - killing the engines with sigterm will not work if you do this
203 engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi', mpi, '--logfile',englogfile])]
202 engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi',
203 mpi, '--logfile',englogfile])]
204 # engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi', mpi])]
204 else: # do what we would normally do
205 else: # do what we would normally do
205 engines = [ Popen(['ipengine','--logfile',englogfile])
206 engines = [ Popen(['ipengine','--logfile',englogfile])
206 for i in range(opt.n) ]
207 for i in range(opt.n) ]
207 eids = [e.pid for e in engines]
208 eids = [e.pid for e in engines]
208 print 'Engines PIDs: ',eids
209 print 'Engines PIDs: ',eids
209 print 'Log files: %s*' % englogfile
210 print 'Log files: %s*' % englogfile
210
211
211 proc_ids = eids + [controller.pid]
212 proc_ids = eids + [controller.pid]
212 procs = engines + [controller]
213 procs = engines + [controller]
213
214
214 grpid = os.getpgrp()
215 grpid = os.getpgrp()
215 try:
216 try:
216 startMsg('127.0.0.1')
217 startMsg('127.0.0.1')
217 print 'You can also hit Ctrl-C to stop it, or use from the cmd line:'
218 print 'You can also hit Ctrl-C to stop it, or use from the cmd line:'
218 print
219 print
219 print 'kill -INT',grpid
220 print 'kill -INT',grpid
220 print
221 print
221 try:
222 try:
222 while True:
223 while True:
223 time.sleep(5)
224 time.sleep(5)
224 except:
225 except:
225 pass
226 pass
226 finally:
227 finally:
227 print 'Stopping cluster. Cleaning up...'
228 print 'Stopping cluster. Cleaning up...'
228 cleanup(stop,controller,engines)
229 cleanup(stop,controller,engines)
229 for i in range(4):
230 for i in range(4):
230 time.sleep(i+2)
231 time.sleep(i+2)
231 nZombies = numAlive(controller,engines)
232 nZombies = numAlive(controller,engines)
232 if nZombies== 0:
233 if nZombies== 0:
233 print 'OK: All processes cleaned up.'
234 print 'OK: All processes cleaned up.'
234 break
235 break
235 print 'Trying again, %d processes did not stop...' % nZombies
236 print 'Trying again, %d processes did not stop...' % nZombies
236 cleanup(kill,controller,engines)
237 cleanup(kill,controller,engines)
237 if numAlive(controller,engines) == 0:
238 if numAlive(controller,engines) == 0:
238 print 'OK: All processes cleaned up.'
239 print 'OK: All processes cleaned up.'
239 break
240 break
240 else:
241 else:
241 print '*'*75
242 print '*'*75
242 print 'ERROR: could not kill some processes, try to do it',
243 print 'ERROR: could not kill some processes, try to do it',
243 print 'manually.'
244 print 'manually.'
244 zombies = []
245 zombies = []
245 if controller.returncode is None:
246 if controller.returncode is None:
246 print 'Controller is alive: pid =',controller.pid
247 print 'Controller is alive: pid =',controller.pid
247 zombies.append(controller.pid)
248 zombies.append(controller.pid)
248 liveEngines = [ e for e in engines if e.returncode is None ]
249 liveEngines = [ e for e in engines if e.returncode is None ]
249 for e in liveEngines:
250 for e in liveEngines:
250 print 'Engine is alive: pid =',e.pid
251 print 'Engine is alive: pid =',e.pid
251 zombies.append(e.pid)
252 zombies.append(e.pid)
252 print
253 print
253 print 'Zombie summary:',' '.join(map(str,zombies))
254 print 'Zombie summary:',' '.join(map(str,zombies))
254
255
255 def clusterRemote(opt,arg):
256 def clusterRemote(opt,arg):
256 """Start a remote cluster over SSH"""
257 """Start a remote cluster over SSH"""
257
258
258 # Load the remote cluster configuration
259 # Load the remote cluster configuration
259 clConfig = {}
260 clConfig = {}
260 execfile(opt.clusterfile,clConfig)
261 execfile(opt.clusterfile,clConfig)
261 contConfig = clConfig['controller']
262 contConfig = clConfig['controller']
262 engConfig = clConfig['engines']
263 engConfig = clConfig['engines']
263 # Determine where to find sshx:
264 # Determine where to find sshx:
264 sshx = clConfig.get('sshx',os.environ.get('IPYTHON_SSHX','sshx'))
265 sshx = clConfig.get('sshx',os.environ.get('IPYTHON_SSHX','sshx'))
265
266
266 # Store all logs inside the ipython directory
267 # Store all logs inside the ipython directory
267 ipdir = cutils.get_ipython_dir()
268 ipdir = cutils.get_ipython_dir()
268 pjoin = os.path.join
269 pjoin = os.path.join
269
270
270 logfile = opt.logfile
271 logfile = opt.logfile
271 if logfile is None:
272 if logfile is None:
272 logdir_base = pjoin(ipdir,'log')
273 logdir_base = pjoin(ipdir,'log')
273 ensureDir(logdir_base)
274 ensureDir(logdir_base)
274 logfile = pjoin(logdir_base,'ipcluster')
275 logfile = pjoin(logdir_base,'ipcluster')
275
276
276 # Append this script's PID to the logfile name always
277 # Append this script's PID to the logfile name always
277 logfile = '%s-%s' % (logfile,os.getpid())
278 logfile = '%s-%s' % (logfile,os.getpid())
278
279
279 print 'Starting controller:'
280 print 'Starting controller:'
280 # Controller data:
281 # Controller data:
281 xsys = os.system
282 xsys = os.system
282
283
283 contHost = contConfig['host']
284 contHost = contConfig['host']
284 contLog = '%s-con-%s-' % (logfile,contHost)
285 contLog = '%s-con-%s-' % (logfile,contHost)
285 cmd = "ssh %s '%s' 'ipcontroller --logfile %s' &" % \
286 cmd = "ssh %s '%s' 'ipcontroller --logfile %s' &" % \
286 (contHost,sshx,contLog)
287 (contHost,sshx,contLog)
287 #print 'cmd:<%s>' % cmd # dbg
288 #print 'cmd:<%s>' % cmd # dbg
288 xsys(cmd)
289 xsys(cmd)
289 time.sleep(2)
290 time.sleep(2)
290
291
291 print 'Starting engines: '
292 print 'Starting engines: '
292 for engineHost,engineData in engConfig.iteritems():
293 for engineHost,engineData in engConfig.iteritems():
293 if isinstance(engineData,int):
294 if isinstance(engineData,int):
294 numEngines = engineData
295 numEngines = engineData
295 else:
296 else:
296 raise NotImplementedError('port configuration not finished for engines')
297 raise NotImplementedError('port configuration not finished for engines')
297
298
298 print 'Starting %d engines on %s' % (numEngines,engineHost)
299 print 'Starting %d engines on %s' % (numEngines,engineHost)
299 engLog = '%s-eng-%s-' % (logfile,engineHost)
300 engLog = '%s-eng-%s-' % (logfile,engineHost)
300 for i in range(numEngines):
301 for i in range(numEngines):
301 cmd = "ssh %s '%s' 'ipengine --controller-ip %s --logfile %s' &" % \
302 cmd = "ssh %s '%s' 'ipengine --controller-ip %s --logfile %s' &" % \
302 (engineHost,sshx,contHost,engLog)
303 (engineHost,sshx,contHost,engLog)
303 #print 'cmd:<%s>' % cmd # dbg
304 #print 'cmd:<%s>' % cmd # dbg
304 xsys(cmd)
305 xsys(cmd)
305 # Wait after each host a little bit
306 # Wait after each host a little bit
306 time.sleep(1)
307 time.sleep(1)
307
308
308 startMsg(contConfig['host'])
309 startMsg(contConfig['host'])
309
310
310 def main():
311 def main():
311 """Main driver for the two big options: local or remote cluster."""
312 """Main driver for the two big options: local or remote cluster."""
312
313
313 opt,arg = parse_args()
314 opt,arg = parse_args()
314
315
315 clusterfile = opt.clusterfile
316 clusterfile = opt.clusterfile
316 if clusterfile:
317 if clusterfile:
317 clusterRemote(opt,arg)
318 clusterRemote(opt,arg)
318 else:
319 else:
319 clusterLocal(opt,arg)
320 clusterLocal(opt,arg)
320
321
321
322
322 if __name__=='__main__':
323 if __name__=='__main__':
323 main()
324 main()
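For reference, clusterRemote above loads the cluster description with execfile(opt.clusterfile, clConfig) and only reads the 'controller', 'engines', and optional 'sshx' entries; per-host port configuration for engines is not yet supported and raises NotImplementedError. A minimal clusterfile could therefore look like the sketch below. The host names and engine counts are made-up illustrative values, not part of this changeset.

# Hypothetical clusterfile read by clusterRemote (values are examples only).
controller = {'host': 'head.example.org'}      # where ipcontroller is launched over ssh
engines = {'node1.example.org': 4,             # hostname -> number of engines to start there
           'node2.example.org': 4}
sshx = 'sshx'                                  # optional; defaults to $IPYTHON_SSHX or 'sshx'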
@@ -1,169 +1,171 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3
3
4 """Start the IPython Engine."""
4 """Start the IPython Engine."""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-------------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18
18
19 # Python looks for an empty string at the beginning of sys.path to enable
19 # Python looks for an empty string at the beginning of sys.path to enable
20 # importing from the cwd.
20 # importing from the cwd.
21 import sys
21 import sys
22 sys.path.insert(0, '')
22 sys.path.insert(0, '')
23
23
24 import sys, os
24 import sys, os
25 from optparse import OptionParser
25 from optparse import OptionParser
26
26
27 from twisted.application import service
27 from twisted.application import service
28 from twisted.internet import reactor
28 from twisted.internet import reactor
29 from twisted.python import log
29 from twisted.python import log
30
30
31 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
31 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
32
32
33 from IPython.kernel.core.config import config_manager as core_config_manager
33 from IPython.kernel.core.config import config_manager as core_config_manager
34 from IPython.config.cutils import import_item
34 from IPython.config.cutils import import_item
35 from IPython.kernel.engineservice import EngineService
35 from IPython.kernel.engineservice import EngineService
36 from IPython.kernel.config import config_manager as kernel_config_manager
36 from IPython.kernel.config import config_manager as kernel_config_manager
37 from IPython.kernel.engineconnector import EngineConnector
37 from IPython.kernel.engineconnector import EngineConnector
38
38
39
39
40 #-------------------------------------------------------------------------------
40 #-------------------------------------------------------------------------------
41 # Code
41 # Code
42 #-------------------------------------------------------------------------------
42 #-------------------------------------------------------------------------------
43
43
44 def start_engine():
44 def start_engine():
45 """
45 """
46 Start the engine, by creating it and starting the Twisted reactor.
46 Start the engine, by creating it and starting the Twisted reactor.
47
47
48 This method does:
48 This method does:
49
49
50 * If it exists, runs the `mpi_import_statement` to call `MPI_Init`
50 * If it exists, runs the `mpi_import_statement` to call `MPI_Init`
51 * Starts the engine logging
51 * Starts the engine logging
52 * Creates an IPython shell and wraps it in an `EngineService`
52 * Creates an IPython shell and wraps it in an `EngineService`
53 * Creates a `foolscap.Tub` to use in connecting to a controller.
53 * Creates a `foolscap.Tub` to use in connecting to a controller.
54 * Uses the tub and the `EngineService` along with a Foolscap URL
54 * Uses the tub and the `EngineService` along with a Foolscap URL
55 (or FURL) to connect to the controller and register the engine
55 (or FURL) to connect to the controller and register the engine
56 with the controller
56 with the controller
57 """
57 """
58 kernel_config = kernel_config_manager.get_config_obj()
58 kernel_config = kernel_config_manager.get_config_obj()
59 core_config = core_config_manager.get_config_obj()
59 core_config = core_config_manager.get_config_obj()
60
60
61
61 # Execute the mpi import statement that needs to call MPI_Init
62 # Execute the mpi import statement that needs to call MPI_Init
63 global mpi
62 mpikey = kernel_config['mpi']['default']
64 mpikey = kernel_config['mpi']['default']
63 mpi_import_statement = kernel_config['mpi'].get(mpikey, None)
65 mpi_import_statement = kernel_config['mpi'].get(mpikey, None)
64 if mpi_import_statement is not None:
66 if mpi_import_statement is not None:
65 try:
67 try:
66 exec mpi_import_statement in locals(), globals()
68 exec mpi_import_statement in globals()
67 except:
69 except:
68 mpi = None
70 mpi = None
69 else:
71 else:
70 mpi = None
72 mpi = None
71
73
72 # Start logging
74 # Start logging
73 logfile = kernel_config['engine']['logfile']
75 logfile = kernel_config['engine']['logfile']
74 if logfile:
76 if logfile:
75 logfile = logfile + str(os.getpid()) + '.log'
77 logfile = logfile + str(os.getpid()) + '.log'
76 try:
78 try:
77 openLogFile = open(logfile, 'w')
79 openLogFile = open(logfile, 'w')
78 except:
80 except:
79 openLogFile = sys.stdout
81 openLogFile = sys.stdout
80 else:
82 else:
81 openLogFile = sys.stdout
83 openLogFile = sys.stdout
82 log.startLogging(openLogFile)
84 log.startLogging(openLogFile)
83
85
84 # Create the underlying shell class and EngineService
86 # Create the underlying shell class and EngineService
85 shell_class = import_item(core_config['shell']['shell_class'])
87 shell_class = import_item(core_config['shell']['shell_class'])
86 engine_service = EngineService(shell_class, mpi=mpi)
88 engine_service = EngineService(shell_class, mpi=mpi)
87 shell_import_statement = core_config['shell']['import_statement']
89 shell_import_statement = core_config['shell']['import_statement']
88 if shell_import_statement:
90 if shell_import_statement:
89 try:
91 try:
90 engine_service.execute(shell_import_statement)
92 engine_service.execute(shell_import_statement)
91 except:
93 except:
92 log.msg("Error running import_statement: %s" % shell_import_statement)
94 log.msg("Error running import_statement: %s" % shell_import_statement)
93
95
94 # Create the service hierarchy
96 # Create the service hierarchy
95 main_service = service.MultiService()
97 main_service = service.MultiService()
96 engine_service.setServiceParent(main_service)
98 engine_service.setServiceParent(main_service)
97 tub_service = Tub()
99 tub_service = Tub()
98 tub_service.setServiceParent(main_service)
100 tub_service.setServiceParent(main_service)
99 # This needs to be called before the connection is initiated
101 # This needs to be called before the connection is initiated
100 main_service.startService()
102 main_service.startService()
101
103
102 # This initiates the connection to the controller and calls
104 # This initiates the connection to the controller and calls
103 # register_engine to tell the controller we are ready to do work
105 # register_engine to tell the controller we are ready to do work
104 engine_connector = EngineConnector(tub_service)
106 engine_connector = EngineConnector(tub_service)
105 furl_file = kernel_config['engine']['furl_file']
107 furl_file = kernel_config['engine']['furl_file']
106 d = engine_connector.connect_to_controller(engine_service, furl_file)
108 d = engine_connector.connect_to_controller(engine_service, furl_file)
107 d.addErrback(lambda _: reactor.stop())
109 d.addErrback(lambda _: reactor.stop())
108
110
109 reactor.run()
111 reactor.run()
110
112
111
113
112 def init_config():
114 def init_config():
113 """
115 """
114 Initialize the configuration using default and command line options.
116 Initialize the configuration using default and command line options.
115 """
117 """
116
118
117 parser = OptionParser()
119 parser = OptionParser()
118
120
119 parser.add_option(
121 parser.add_option(
120 "--furl-file",
122 "--furl-file",
121 type="string",
123 type="string",
122 dest="furl_file",
124 dest="furl_file",
123 help="The filename containing the FURL of the controller"
125 help="The filename containing the FURL of the controller"
124 )
126 )
125 parser.add_option(
127 parser.add_option(
126 "--mpi",
128 "--mpi",
127 type="string",
129 type="string",
128 dest="mpi",
130 dest="mpi",
129 help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)"
131 help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)"
130 )
132 )
131 parser.add_option(
133 parser.add_option(
132 "-l",
134 "-l",
133 "--logfile",
135 "--logfile",
134 type="string",
136 type="string",
135 dest="logfile",
137 dest="logfile",
136 help="log file name (default is stdout)"
138 help="log file name (default is stdout)"
137 )
139 )
138 parser.add_option(
140 parser.add_option(
139 "--ipythondir",
141 "--ipythondir",
140 type="string",
142 type="string",
141 dest="ipythondir",
143 dest="ipythondir",
142 help="look for config files and profiles in this directory"
144 help="look for config files and profiles in this directory"
143 )
145 )
144
146
145 (options, args) = parser.parse_args()
147 (options, args) = parser.parse_args()
146
148
147 kernel_config_manager.update_config_obj_from_default_file(options.ipythondir)
149 kernel_config_manager.update_config_obj_from_default_file(options.ipythondir)
148 core_config_manager.update_config_obj_from_default_file(options.ipythondir)
150 core_config_manager.update_config_obj_from_default_file(options.ipythondir)
149
151
150 kernel_config = kernel_config_manager.get_config_obj()
152 kernel_config = kernel_config_manager.get_config_obj()
151 # Now override with command line options
153 # Now override with command line options
152 if options.furl_file is not None:
154 if options.furl_file is not None:
153 kernel_config['engine']['furl_file'] = options.furl_file
155 kernel_config['engine']['furl_file'] = options.furl_file
154 if options.logfile is not None:
156 if options.logfile is not None:
155 kernel_config['engine']['logfile'] = options.logfile
157 kernel_config['engine']['logfile'] = options.logfile
156 if options.mpi is not None:
158 if options.mpi is not None:
157 kernel_config['mpi']['default'] = options.mpi
159 kernel_config['mpi']['default'] = options.mpi
158
160
159
161
160 def main():
162 def main():
161 """
163 """
162 After creating the configuration information, start the engine.
164 After creating the configuration information, start the engine.
163 """
165 """
164 init_config()
166 init_config()
165 start_engine()
167 start_engine()
166
168
167
169
168 if __name__ == "__main__":
170 if __name__ == "__main__":
169 main()
\ No newline at end of file
171 main()
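As a rough illustration of the MPI handling in start_engine above: kernel_config['mpi'] is expected to map a method name to an import statement, and its 'default' key (overridable with the --mpi option) selects which statement is exec'd at engine startup. The concrete strings in this sketch are assumptions for illustration only, not values taken from this changeset.

# Assumed shape of the mpi section of the kernel config (illustrative only).
mpi_section = {
    'mpi4py': 'from mpi4py import MPI as mpi',              # assumed import statement
    'pytrilinos': 'from PyTrilinos import Epetra as mpi',   # assumed import statement
    'default': 'mpi4py',                                     # set from the --mpi option
}
mpikey = mpi_section['default']
mpi_import_statement = mpi_section.get(mpikey, None)         # None (or '') disables MPI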
@@ -1,799 +1,1113 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_task -*-
2 # -*- test-case-name: IPython.kernel.tests.test_task -*-
3
3
4 """Task farming representation of the ControllerService."""
4 """Task farming representation of the ControllerService."""
5
5
6 __docformat__ = "restructuredtext en"
6 __docformat__ = "restructuredtext en"
7
7
8 #-------------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
9 # Copyright (C) 2008 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-------------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-------------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19 import copy, time
19 import copy, time
20 from types import FunctionType as function
20 from types import FunctionType
21
21
22 import zope.interface as zi, string
22 import zope.interface as zi, string
23 from twisted.internet import defer, reactor
23 from twisted.internet import defer, reactor
24 from twisted.python import components, log, failure
24 from twisted.python import components, log, failure
25
25
26 # from IPython.genutils import time
26 from IPython.kernel.util import printer
27
28 from IPython.kernel import engineservice as es, error
27 from IPython.kernel import engineservice as es, error
29 from IPython.kernel import controllerservice as cs
28 from IPython.kernel import controllerservice as cs
30 from IPython.kernel.twistedutil import gatherBoth, DeferredList
29 from IPython.kernel.twistedutil import gatherBoth, DeferredList
31
30
32 from IPython.kernel.pickleutil import can,uncan, CannedFunction
31 from IPython.kernel.pickleutil import can, uncan, CannedFunction
33
32
34 def canTask(task):
33 #-----------------------------------------------------------------------------
35 t = copy.copy(task)
34 # Definition of the Task objects
36 t.depend = can(t.depend)
35 #-----------------------------------------------------------------------------
37 if t.recovery_task:
38 t.recovery_task = canTask(t.recovery_task)
39 return t
40
41 def uncanTask(task):
42 t = copy.copy(task)
43 t.depend = uncan(t.depend)
44 if t.recovery_task and t.recovery_task is not task:
45 t.recovery_task = uncanTask(t.recovery_task)
46 return t
47
36
48 time_format = '%Y/%m/%d %H:%M:%S'
37 time_format = '%Y/%m/%d %H:%M:%S'
49
38
50 class Task(object):
39 class ITask(zi.Interface):
51 """Our representation of a task for the `TaskController` interface.
40 """
41 This interface provides a generic definition of what constitutes a task.
42
43 There are two sides to a task. First a task needs to take input from
44 a user to determine what work is performed by the task. Second, the
45 task needs to have the logic that knows how to turn that information
46 into specific calls to a worker, through the `IQueuedEngine` interface.
47
48 Many methods in this class get two things passed to them: a Deferred
49 and an IQueuedEngine implementer. Such methods should register callbacks
50 on the Deferred that use the IQueuedEngine to accomplish something. See
51 the existing task objects for examples.
52 """
53
54 zi.Attribute('retries','How many times to retry the task')
55 zi.Attribute('recovery_task','A task to try if the initial one fails')
56 zi.Attribute('taskid','the id of the task')
57
58 def start_time(result):
59 """
60 Do anything needed to start the timing of the task.
61
62 Must simply return the result after starting the timers.
63 """
64
65 def stop_time(result):
66 """
67 Do anything needed to stop the timing of the task.
68
69 Must simply return the result after stopping the timers. This
70 method will usually set attributes that are used by `process_result`
71 in building the result of the task.
72 """
73
74 def pre_task(d, queued_engine):
75 """Do something with the queued_engine before the task is run.
76
77 This method should simply add callbacks to the input Deferred
78 that do something with the `queued_engine` before the task is run.
79
80 :Parameters:
81 d : Deferred
82 The deferred that actions should be attached to
83 queued_engine : IQueuedEngine implementer
84 The worker that has been allocated to perform the task
85 """
86
87 def post_task(d, queued_engine):
88 """Do something with the queued_engine after the task is run.
89
90 This method should simply add callbacks to the input Deferred
91 that do something with the `queued_engine` after the task is run.
92
93 :Parameters:
94 d : Deferred
95 The deferred that actions should be attached to
96 queued_engine : IQueuedEngine implementer
97 The worker that has been allocated to perform the task
98 """
99
100 def submit_task(d, queued_engine):
101 """Submit a task using the `queued_engine` we have been allocated.
102
103 When a task is ready to run, this method is called. This method
104 must take the internal information of the task and make suitable
105 calls on the queued_engine to have the actual work done.
106
107 This method should simply add callbacks to the input Deferred
108 that do something with the `queued_engine` before the task is run.
109
110 :Parameters:
111 d : Deferred
112 The deferred that actions should be attached to
113 queued_engine : IQueuedEngine implementer
114 The worker that has been allocated to perform the task
115 """
116
117 def process_result(d, result, engine_id):
118 """Take a raw task result.
119
120 Objects that implement `ITask` can choose how the result of running
121 the task is presented. This method takes the raw result and
122 does this logic. Two examples are the `MapTask`, which simply returns
123 the raw result or a `Failure` object and the `StringTask` which
124 returns a `TaskResult` object.
125
126 :Parameters:
127 d : Deferred
128 The deferred that actions should be attached to
129 result : object
130 The raw task result that needs to be wrapped
131 engine_id : int
132 The id of the engine that did the task
133
134 :Returns:
135 The result, as a tuple of the form: (success, result).
136 Here, success is a boolean indicating if the task
137 succeeded or failed and result is the result.
138 """
52
139
53 The user should create instances of this class to represent a task that
140 def check_depend(properties):
54 needs to be done.
141 """Check properties to see if the task should be run.
142
143 :Parameters:
144 properties : dict
145 A dictionary of properties that an engine has set
146
147 :Returns:
148 True if the task should be run, False otherwise
149 """
150
151 def can_task(self):
152 """Serialize (can) any functions in the task for pickling.
153
154 Subclasses must override this method and make sure that all
155 functions in the task are canned by calling `can` on the
156 function.
157 """
158
159 def uncan_task(self):
160 """Unserialize (uncan) any canned function in the task."""
161
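To make the callback convention described in the `ITask` docstring concrete, here is a minimal sketch of a custom task, not part of this changeset, that implements submit_task by chaining calls on the allocated engine. It assumes the BaseTask class defined just below and the IQueuedEngine methods (push, execute, pull) used elsewhere in this file.

class EchoTask(BaseTask):
    """Illustrative only: push a value, run a statement, pull the result back."""

    def __init__(self, value, **kwargs):
        BaseTask.__init__(self, **kwargs)
        self.value = value

    def submit_task(self, d, queued_engine):
        # Register callbacks on the Deferred; the worker fires them in order.
        d.addCallback(lambda r: queued_engine.push(dict(_echo_in=self.value)))
        d.addCallback(lambda r: queued_engine.execute('_echo_out = _echo_in'))
        d.addCallback(lambda r: queued_engine.pull('_echo_out'))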
162 class BaseTask(object):
163 """
164 Common functionality for all objects implementing `ITask`.
165 """
166
167 zi.implements(ITask)
168
169 def __init__(self, clear_before=False, clear_after=False, retries=0,
170 recovery_task=None, depend=None):
171 """
172 Make a generic task.
55
173
56 :Parameters:
174 :Parameters:
57 expression : str
58 A str that is valid python code that is the task.
59 pull : str or list of str
60 The names of objects to be pulled as results. If not specified,
61 will return {'result', None}
62 push : dict
63 A dict of objects to be pushed into the engines namespace before
64 execution of the expression.
65 clear_before : boolean
175 clear_before : boolean
66 Should the engine's namespace be cleared before the task is run.
176 Should the engine's namespace be cleared before the task
67 Default=False.
177 is run
68 clear_after : boolean
178 clear_after : boolean
69 Should the engine's namespace be cleared after the task is run.
179 Should the engine's namespace be cleared after the task is run
70 Default=False.
71 retries : int
180 retries : int
72 The number of times to resubmit the task if it fails. Default=0.
181 The number of times a task should be retried upon failure
73 recovery_task : Task
182 recovery_task : any task object
74 This is the Task to be run when the task has exhausted its retries
183 If a task fails and it has a recovery_task, that is run
75 Default=None.
184 upon a retry
76 depend : bool function(properties)
185 depend : FunctionType
77 This is the dependency function for the Task, which determines
186 A function that is called to test for properties. This function
78 whether a task can be run on a Worker. `depend` is called with
187 must take one argument, the properties dict and return a boolean
79 one argument, the worker's properties dict, and should return
188 """
80 True if the worker meets the dependencies or False if it does
189 self.clear_before = clear_before
81 not.
190 self.clear_after = clear_after
82 Default=None - run on any worker
191 self.retries = retries
83 options : dict
192 self.recovery_task = recovery_task
84 Any other keyword options for more elaborate uses of tasks
193 self.depend = depend
85
194 self.taskid = None
86 Examples
195
87 --------
196 def start_time(self, result):
197 """
198 Start the basic timers.
199 """
200 self.start = time.time()
201 self.start_struct = time.localtime()
202 return result
203
204 def stop_time(self, result):
205 """
206 Stop the basic timers.
207 """
208 self.stop = time.time()
209 self.stop_struct = time.localtime()
210 self.duration = self.stop - self.start
211 self.submitted = time.strftime(time_format, self.start_struct)
212 self.completed = time.strftime(time_format)
213 return result
214
215 def pre_task(self, d, queued_engine):
216 """
217 Clear the engine before running the task if clear_before is set.
218 """
219 if self.clear_before:
220 d.addCallback(lambda r: queued_engine.reset())
221
222 def post_task(self, d, queued_engine):
223 """
224 Clear the engine after running the task if clear_after is set.
225 """
226 def reseter(result):
227 queued_engine.reset()
228 return result
229 if self.clear_after:
230 d.addBoth(reseter)
231
232 def submit_task(self, d, queued_engine):
233 raise NotImplementedError('submit_task must be implemented in a subclass')
234
235 def process_result(self, result, engine_id):
236 """
237 Process a task result.
238
239 This is the default `process_result` that just returns the raw
240 result or a `Failure`.
241 """
242 if isinstance(result, failure.Failure):
243 return (False, result)
244 else:
245 return (True, result)
246
247 def check_depend(self, properties):
248 """
249 Calls self.depend(properties) to see if a task should be run.
250 """
251 if self.depend is not None:
252 return self.depend(properties)
253 else:
254 return True
255
256 def can_task(self):
257 self.depend = can(self.depend)
258 if isinstance(self.recovery_task, BaseTask):
259 self.recovery_task.can_task()
260
261 def uncan_task(self):
262 self.depend = uncan(self.depend)
263 if isinstance(self.recovery_task, BaseTask):
264 self.recovery_task.uncan_task()
265
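The depend hook is the only scheduling constraint a task carries: FIFOScheduler.schedule (further down in this file) calls task.check_depend(worker.properties) and skips workers for which it returns False. A small sketch in the spirit of the hasMPI example from the old docstring; the property name and the task construction are illustrative only.

def has_mpi(properties):
    """Only run on engines that have set an 'mpi' property."""
    return properties.get('mpi') is not None

# Hypothetical usage: any BaseTask subclass accepts depend=...
# task = MapTask(some_function, args=(1, 2), depend=has_mpi)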
266 class MapTask(BaseTask):
267 """
268 A task that consists of a function and arguments.
269 """
88
270
89 >>> t = Task('dostuff(args)')
271 zi.implements(ITask)
90 >>> t = Task('a=5', pull='a')
272
91 >>> t = Task('a=5\nb=4', pull=['a','b'])
273 def __init__(self, function, args=None, kwargs=None, clear_before=False,
92 >>> t = Task('os.kill(os.getpid(),9)', retries=100) # this is a bad idea
274 clear_after=False, retries=0, recovery_task=None, depend=None):
93 # A dependency case:
275 """
94 >>> def hasMPI(props):
276 Create a task based on a function, args and kwargs.
95 ... return props.get('mpi') is not None
277
96 >>> t = Task('mpi.send(blah,blah)', depend = hasMPI)
278 This is a simple type of task that consists of calling:
279 function(*args, **kwargs) and returning the result.
280
281 The return value of the function, or a `Failure` wrapping an
282 exception is the task result for this type of task.
283 """
284 BaseTask.__init__(self, clear_before, clear_after, retries,
285 recovery_task, depend)
286 if not isinstance(function, FunctionType):
287 raise TypeError('a task function must be a FunctionType')
288 self.function = function
289 if args is None:
290 self.args = ()
291 else:
292 self.args = args
293 if not isinstance(self.args, (list, tuple)):
294 raise TypeError('a task args must be a list or tuple')
295 if kwargs is None:
296 self.kwargs = {}
297 else:
298 self.kwargs = kwargs
299 if not isinstance(self.kwargs, dict):
300 raise TypeError('a task kwargs must be a dict')
301
302 def submit_task(self, d, queued_engine):
303 d.addCallback(lambda r: queued_engine.push_function(
304 dict(_ipython_task_function=self.function))
305 )
306 d.addCallback(lambda r: queued_engine.push(
307 dict(_ipython_task_args=self.args,_ipython_task_kwargs=self.kwargs))
308 )
309 d.addCallback(lambda r: queued_engine.execute(
310 '_ipython_task_result = _ipython_task_function(*_ipython_task_args,**_ipython_task_kwargs)')
311 )
312 d.addCallback(lambda r: queued_engine.pull('_ipython_task_result'))
313
314 def can_task(self):
315 self.function = can(self.function)
316 BaseTask.can_task(self)
317
318 def uncan_task(self):
319 self.function = uncan(self.function)
320 BaseTask.uncan_task(self)
321
322
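A short usage sketch for MapTask follows; the function, args, and kwargs are made-up examples. The function must be a plain FunctionType, and the raw return value (or a Failure) becomes the task result.

def add(a, b, scale=1):
    return (a + b) * scale

# Hypothetical construction; the task is later handed to an ITaskController's run().
task = MapTask(add, args=(2, 3), kwargs={'scale': 10}, retries=1)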
323 class StringTask(BaseTask):
324 """
325 A task that consists of a string of Python code to run.
97 """
326 """
98
327
99 def __init__(self, expression, pull=None, push=None,
328 def __init__(self, expression, pull=None, push=None,
100 clear_before=False, clear_after=False, retries=0,
329 clear_before=False, clear_after=False, retries=0,
101 recovery_task=None, depend=None, **options):
330 recovery_task=None, depend=None):
331 """
332 Create a task based on a Python expression and variables
333
334 This type of task lets you push a set of variables to the engines
335 namespace, run a Python string in that namespace and then bring back
336 a different set of Python variables as the result.
337
338 Because this type of task can return many results (through the
339 `pull` keyword argument) it returns a special `TaskResult` object
340 that wraps the pulled variables, statistics about the run and
341 any exceptions raised.
342 """
343 if not isinstance(expression, str):
344 raise TypeError('a task expression must be a string')
102 self.expression = expression
345 self.expression = expression
103 if isinstance(pull, str):
346
104 self.pull = [pull]
347 if pull==None:
105 else:
348 self.pull = ()
349 elif isinstance(pull, str):
350 self.pull = (pull,)
351 elif isinstance(pull, (list, tuple)):
106 self.pull = pull
352 self.pull = pull
353 else:
354 raise TypeError('pull must be str or a sequence of strs')
355
356 if push==None:
357 self.push = {}
358 elif isinstance(push, dict):
107 self.push = push
359 self.push = push
108 self.clear_before = clear_before
360 else:
109 self.clear_after = clear_after
361 raise TypeError('push must be a dict')
110 self.retries=retries
362
111 self.recovery_task = recovery_task
363 BaseTask.__init__(self, clear_before, clear_after, retries,
112 self.depend = depend
364 recovery_task, depend)
113 self.options = options
114 self.taskid = None
115
365
116 class ResultNS:
366 def submit_task(self, d, queued_engine):
117 """The result namespace object for use in TaskResult objects as tr.ns.
367 if self.push is not None:
368 d.addCallback(lambda r: queued_engine.push(self.push))
369
370 d.addCallback(lambda r: queued_engine.execute(self.expression))
371
372 if self.pull is not None:
373 d.addCallback(lambda r: queued_engine.pull(self.pull))
374 else:
375 d.addCallback(lambda r: None)
376
377 def process_result(self, result, engine_id):
378 if isinstance(result, failure.Failure):
379 tr = TaskResult(result, engine_id)
380 else:
381 if self.pull is None:
382 resultDict = {}
383 elif len(self.pull) == 1:
384 resultDict = {self.pull[0]:result}
385 else:
386 resultDict = dict(zip(self.pull, result))
387 tr = TaskResult(resultDict, engine_id)
388 # Assign task attributes
389 tr.submitted = self.submitted
390 tr.completed = self.completed
391 tr.duration = self.duration
392 if hasattr(self,'taskid'):
393 tr.taskid = self.taskid
394 else:
395 tr.taskid = None
396 if isinstance(result, failure.Failure):
397 return (False, tr)
398 else:
399 return (True, tr)
400
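A corresponding sketch for StringTask, with illustrative names and values: the variables in push are sent to the engine, expression is executed there, and the names in pull come back wrapped in the TaskResult defined below.

# Hypothetical construction of a string task.
task = StringTask('c = a + b',
                  push=dict(a=2, b=3),
                  pull=('c',),
                  clear_after=True)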
401 class ResultNS(object):
402 """
403 A dict like object for holding the results of a task.
404
405 The result namespace object for use in `TaskResult` objects as tr.ns.
118 It builds an object from a dictionary, such that it has attributes
406 It builds an object from a dictionary, such that it has attributes
119 according to the key,value pairs of the dictionary.
407 according to the key,value pairs of the dictionary.
120
408
121 This works by calling setattr on ALL key,value pairs in the dict. If a user
409 This works by calling setattr on ALL key,value pairs in the dict. If a user
122 chooses to overwrite the `__repr__` or `__getattr__` attributes, they can.
410 chooses to overwrite the `__repr__` or `__getattr__` attributes, they can.
123 This can be a bad idea, as it may corrupt standard behavior of the
411 This can be a bad idea, as it may corrupt standard behavior of the
124 ns object.
412 ns object.
125
413
126 Example
414 Example
127 --------
415 --------
128
416
129 >>> ns = ResultNS({'a':17,'foo':range(3)})
417 >>> ns = ResultNS({'a':17,'foo':range(3)})
130 >>> print ns
418 >>> print ns
131 NS{'a':17,'foo':range(3)}
419 NS{'a':17,'foo':range(3)}
132 >>> ns.a
420 >>> ns.a
133 17
421 17
134 >>> ns['foo']
422 >>> ns['foo']
135 [0,1,2]
423 [0,1,2]
136 """
424 """
137 def __init__(self, dikt):
425 def __init__(self, dikt):
138 for k,v in dikt.iteritems():
426 for k,v in dikt.iteritems():
139 setattr(self,k,v)
427 setattr(self,k,v)
140
428
141 def __repr__(self):
429 def __repr__(self):
142 l = dir(self)
430 l = dir(self)
143 d = {}
431 d = {}
144 for k in l:
432 for k in l:
145 # do not print private objects
433 # do not print private objects
146 if k[:2] != '__' and k[-2:] != '__':
434 if k[:2] != '__' and k[-2:] != '__':
147 d[k] = getattr(self, k)
435 d[k] = getattr(self, k)
148 return "NS"+repr(d)
436 return "NS"+repr(d)
149
437
150 def __getitem__(self, key):
438 def __getitem__(self, key):
151 return getattr(self, key)
439 return getattr(self, key)
152
440
153 class TaskResult(object):
441 class TaskResult(object):
154 """
442 """
155 An object for returning task results.
443 An object for returning task results for certain types of tasks.
156
444
157 This object encapsulates the results of a task. On task
445 This object encapsulates the results of a task. On task
158 success it will have a keys attribute that will have a list
446 success it will have a keys attribute that will have a list
159 of the variables that have been pulled back. These variables
447 of the variables that have been pulled back. These variables
160 are accessible as attributes of this class as well. On
448 are accessible as attributes of this class as well. On
161 success the failure attribute will be None.
449 success the failure attribute will be None.
162
450
163 In task failure, keys will be empty, but failure will contain
451 In task failure, keys will be empty, but failure will contain
164 the failure object that encapsulates the remote exception.
452 the failure object that encapsulates the remote exception.
165 One can also simply call the raiseException() method of
453 One can also simply call the `raise_exception` method of
166 this class to re-raise any remote exception in the local
454 this class to re-raise any remote exception in the local
167 session.
455 session.
168
456
169 The TaskResult has a .ns member, which is a property for access
457 The `TaskResult` has a `.ns` member, which is a property for access
170 to the results. If the Task had pull=['a', 'b'], then the
458 to the results. If the Task had pull=['a', 'b'], then the
171 Task Result will have attributes tr.ns.a, tr.ns.b for those values.
459 Task Result will have attributes `tr.ns.a`, `tr.ns.b` for those values.
172 Accessing tr.ns will raise the remote failure if the task failed.
460 Accessing `tr.ns` will raise the remote failure if the task failed.
173
461
174 The engineid attribute should have the engineid of the engine
462 The `engineid` attribute should have the `engineid` of the engine
175 that ran the task. But, because engines can come and go in
463 that ran the task. But, because engines can come and go,
176 the ipython task system, the engineid may not continue to be
464 the `engineid` may not continue to be
177 valid or accurate.
465 valid or accurate.
178
466
179 The taskid attribute simply gives the taskid that the task
467 The `taskid` attribute simply gives the `taskid` that the task
180 is tracked under.
468 is tracked under.
181 """
469 """
182 taskid = None
470 taskid = None
183
471
184 def _getNS(self):
472 def _getNS(self):
185 if isinstance(self.failure, failure.Failure):
473 if isinstance(self.failure, failure.Failure):
186 return self.failure.raiseException()
474 return self.failure.raiseException()
187 else:
475 else:
188 return self._ns
476 return self._ns
189
477
190 def _setNS(self, v):
478 def _setNS(self, v):
191 raise Exception("I am protected!")
479 raise Exception("the ns attribute cannot be changed")
192
480
193 ns = property(_getNS, _setNS)
481 ns = property(_getNS, _setNS)
194
482
195 def __init__(self, results, engineid):
483 def __init__(self, results, engineid):
196 self.engineid = engineid
484 self.engineid = engineid
197 if isinstance(results, failure.Failure):
485 if isinstance(results, failure.Failure):
198 self.failure = results
486 self.failure = results
199 self.results = {}
487 self.results = {}
200 else:
488 else:
201 self.results = results
489 self.results = results
202 self.failure = None
490 self.failure = None
203
491
204 self._ns = ResultNS(self.results)
492 self._ns = ResultNS(self.results)
205
493
206 self.keys = self.results.keys()
494 self.keys = self.results.keys()
207
495
208 def __repr__(self):
496 def __repr__(self):
209 if self.failure is not None:
497 if self.failure is not None:
210 contents = self.failure
498 contents = self.failure
211 else:
499 else:
212 contents = self.results
500 contents = self.results
213 return "TaskResult[ID:%r]:%r"%(self.taskid, contents)
501 return "TaskResult[ID:%r]:%r"%(self.taskid, contents)
214
502
215 def __getitem__(self, key):
503 def __getitem__(self, key):
216 if self.failure is not None:
504 if self.failure is not None:
217 self.raiseException()
505 self.raise_exception()
218 return self.results[key]
506 return self.results[key]
219
507
220 def raiseException(self):
508 def raise_exception(self):
221 """Re-raise any remote exceptions in the local python session."""
509 """Re-raise any remote exceptions in the local python session."""
222 if self.failure is not None:
510 if self.failure is not None:
223 self.failure.raiseException()
511 self.failure.raiseException()
224
512
225
513
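Once such a task has run, the returned TaskResult can be inspected as sketched below. The attribute name follows the pull list used in the StringTask example above; obtaining tr from a controller is outside this snippet and is assumed.

# tr is assumed to be a TaskResult for a task that pulled 'c'.
if tr.failure is None:
    print tr.ns.c         # attribute-style access via the ns property
    print tr['c']         # or dictionary-style access
else:
    tr.raise_exception()  # re-raise the remote error locally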
514 #-----------------------------------------------------------------------------
515 # The controller side of things
516 #-----------------------------------------------------------------------------
517
226 class IWorker(zi.Interface):
518 class IWorker(zi.Interface):
227 """The Basic Worker Interface.
519 """The Basic Worker Interface.
228
520
229 A worker is a representation of an Engine that is ready to run tasks.
521 A worker is a representation of an Engine that is ready to run tasks.
230 """
522 """
231
523
232 zi.Attribute("workerid", "the id of the worker")
524 zi.Attribute("workerid", "the id of the worker")
233
525
234 def run(task):
526 def run(task):
235 """Run task in worker's namespace.
527 """Run task in worker's namespace.
236
528
237 :Parameters:
529 :Parameters:
238 task : a `Task` object
530 task : a `Task` object
239
531
240 :Returns: `Deferred` to a `TaskResult` object.
532 :Returns: `Deferred` to a tuple of (success, result) where
533 success is a boolean that signifies success or failure
534 and result is the task result.
241 """
535 """
242
536
243
537
244 class WorkerFromQueuedEngine(object):
538 class WorkerFromQueuedEngine(object):
245 """Adapt an `IQueuedEngine` to an `IWorker` object"""
539 """Adapt an `IQueuedEngine` to an `IWorker` object"""
540
246 zi.implements(IWorker)
541 zi.implements(IWorker)
247
542
248 def __init__(self, qe):
543 def __init__(self, qe):
249 self.queuedEngine = qe
544 self.queuedEngine = qe
250 self.workerid = None
545 self.workerid = None
251
546
252 def _get_properties(self):
547 def _get_properties(self):
253 return self.queuedEngine.properties
548 return self.queuedEngine.properties
254
549
255 properties = property(_get_properties, lambda self, _:None)
550 properties = property(_get_properties, lambda self, _:None)
256
551
257 def run(self, task):
552 def run(self, task):
258 """Run task in worker's namespace.
553 """Run task in worker's namespace.
259
554
555 This takes a task and calls methods on the task that actually
556 cause `self.queuedEngine` to do the task. See the methods of
557 `ITask` for more information about how these methods are called.
558
260 :Parameters:
559 :Parameters:
261 task : a `Task` object
560 task : a `Task` object
262
561
263 :Returns: `Deferred` to a `TaskResult` object.
562 :Returns: `Deferred` to a tuple of (success, result) where
563 success is a boolean that signifies success or failure
564 and result is the task result.
264 """
565 """
265 if task.clear_before:
266 d = self.queuedEngine.reset()
267 else:
268 d = defer.succeed(None)
566 d = defer.succeed(None)
269
567 d.addCallback(task.start_time)
270 if task.push is not None:
568 task.pre_task(d, self.queuedEngine)
271 d.addCallback(lambda r: self.queuedEngine.push(task.push))
569 task.submit_task(d, self.queuedEngine)
272
570 task.post_task(d, self.queuedEngine)
273 d.addCallback(lambda r: self.queuedEngine.execute(task.expression))
571 d.addBoth(task.stop_time)
274
572 d.addBoth(task.process_result, self.queuedEngine.id)
275 if task.pull is not None:
573 # At this point, there will be (success, result) coming down the line
276 d.addCallback(lambda r: self.queuedEngine.pull(task.pull))
574 return d
277 else:
278 d.addCallback(lambda r: None)
279
280 def reseter(result):
281 self.queuedEngine.reset()
282 return result
283
284 if task.clear_after:
285 d.addBoth(reseter)
286
287 return d.addBoth(self._zipResults, task.pull, time.time(), time.localtime())
288
289 def _zipResults(self, result, names, start, start_struct):
290 """Callback for construting the TaskResult object."""
291 if isinstance(result, failure.Failure):
292 tr = TaskResult(result, self.queuedEngine.id)
293 else:
294 if names is None:
295 resultDict = {}
296 elif len(names) == 1:
297 resultDict = {names[0]:result}
298 else:
299 resultDict = dict(zip(names, result))
300 tr = TaskResult(resultDict, self.queuedEngine.id)
301 # the time info
302 tr.submitted = time.strftime(time_format, start_struct)
303 tr.completed = time.strftime(time_format)
304 tr.duration = time.time()-start
305 return tr
306
575
307
576
308 components.registerAdapter(WorkerFromQueuedEngine, es.IEngineQueued, IWorker)
577 components.registerAdapter(WorkerFromQueuedEngine, es.IEngineQueued, IWorker)
309
578
310 class IScheduler(zi.Interface):
579 class IScheduler(zi.Interface):
311 """The interface for a Scheduler.
580 """The interface for a Scheduler.
312 """
581 """
313 zi.Attribute("nworkers", "the number of unassigned workers")
582 zi.Attribute("nworkers", "the number of unassigned workers")
314 zi.Attribute("ntasks", "the number of unscheduled tasks")
583 zi.Attribute("ntasks", "the number of unscheduled tasks")
315 zi.Attribute("workerids", "a list of the worker ids")
584 zi.Attribute("workerids", "a list of the worker ids")
316 zi.Attribute("taskids", "a list of the task ids")
585 zi.Attribute("taskids", "a list of the task ids")
317
586
318 def add_task(task, **flags):
587 def add_task(task, **flags):
319 """Add a task to the queue of the Scheduler.
588 """Add a task to the queue of the Scheduler.
320
589
321 :Parameters:
590 :Parameters:
322 task : a `Task` object
591 task : an `ITask` implementer
323 The task to be queued.
592 The task to be queued.
324 flags : dict
593 flags : dict
325 General keywords for more sophisticated scheduling
594 General keywords for more sophisticated scheduling
326 """
595 """
327
596
328 def pop_task(id=None):
597 def pop_task(id=None):
329 """Pops a Task object.
598 """Pops a task object from the queue.
330
599
331 This gets the next task to be run. If no `id` is requested, the highest priority
600 This gets the next task to be run. If no `id` is requested, the highest priority
332 task is returned.
601 task is returned.
333
602
334 :Parameters:
603 :Parameters:
335 id
604 id
336 The id of the task to be popped. The default (None) is to return
605 The id of the task to be popped. The default (None) is to return
337 the highest priority task.
606 the highest priority task.
338
607
339 :Returns: a `Task` object
608 :Returns: an `ITask` implementer
340
609
341 :Exceptions:
610 :Exceptions:
342 IndexError : raised if no taskid in queue
611 IndexError : raised if no taskid in queue
343 """
612 """
344
613
345 def add_worker(worker, **flags):
614 def add_worker(worker, **flags):
346 """Add a worker to the worker queue.
615 """Add a worker to the worker queue.
347
616
348 :Parameters:
617 :Parameters:
349 worker : an IWorker implementing object
618 worker : an `IWorker` implementer
350 flags : General keywords for more sophisticated scheduling
619 flags : dict
620 General keywords for more sophisticated scheduling
351 """
621 """
352
622
353 def pop_worker(id=None):
623 def pop_worker(id=None):
354 """Pops an IWorker object that is ready to do work.
624 """Pops an IWorker object that is ready to do work.
355
625
356 This gets the next IWorker that is ready to do work.
626 This gets the next IWorker that is ready to do work.
357
627
358 :Parameters:
628 :Parameters:
359 id : if specified, will pop worker with workerid=id, else pops
629 id : if specified, will pop worker with workerid=id, else pops
360 highest priority worker. Defaults to None.
630 highest priority worker. Defaults to None.
361
631
362 :Returns:
632 :Returns:
363 an IWorker object
633 an IWorker object
364
634
365 :Exceptions:
635 :Exceptions:
366 IndexError : raised if no workerid in queue
636 IndexError : raised if no workerid in queue
367 """
637 """
368
638
369 def ready():
639 def ready():
370 """Returns True if there is something to do, False otherwise"""
640 """Returns True if there is something to do, False otherwise"""
371
641
372 def schedule():
642 def schedule():
373 """Returns a tuple of the worker and task pair for the next
643 """Returns (worker,task) pair for the next task to be run."""
374 task to be run.
375 """
376
644
377
645
378 class FIFOScheduler(object):
646 class FIFOScheduler(object):
379 """A basic First-In-First-Out (Queue) Scheduler.
647 """
380 This is the default Scheduler for the TaskController.
648 A basic First-In-First-Out (Queue) Scheduler.
381 See the docstrings for IScheduler for interface details.
649
650 This is the default Scheduler for the `TaskController`.
651 See the docstrings for `IScheduler` for interface details.
382 """
652 """
383
653
384 zi.implements(IScheduler)
654 zi.implements(IScheduler)
385
655
386 def __init__(self):
656 def __init__(self):
387 self.tasks = []
657 self.tasks = []
388 self.workers = []
658 self.workers = []
389
659
390 def _ntasks(self):
660 def _ntasks(self):
391 return len(self.tasks)
661 return len(self.tasks)
392
662
393 def _nworkers(self):
663 def _nworkers(self):
394 return len(self.workers)
664 return len(self.workers)
395
665
396 ntasks = property(_ntasks, lambda self, _:None)
666 ntasks = property(_ntasks, lambda self, _:None)
397 nworkers = property(_nworkers, lambda self, _:None)
667 nworkers = property(_nworkers, lambda self, _:None)
398
668
399 def _taskids(self):
669 def _taskids(self):
400 return [t.taskid for t in self.tasks]
670 return [t.taskid for t in self.tasks]
401
671
402 def _workerids(self):
672 def _workerids(self):
403 return [w.workerid for w in self.workers]
673 return [w.workerid for w in self.workers]
404
674
405 taskids = property(_taskids, lambda self,_:None)
675 taskids = property(_taskids, lambda self,_:None)
406 workerids = property(_workerids, lambda self,_:None)
676 workerids = property(_workerids, lambda self,_:None)
407
677
408 def add_task(self, task, **flags):
678 def add_task(self, task, **flags):
409 self.tasks.append(task)
679 self.tasks.append(task)
410
680
411 def pop_task(self, id=None):
681 def pop_task(self, id=None):
412 if id is None:
682 if id is None:
413 return self.tasks.pop(0)
683 return self.tasks.pop(0)
414 else:
684 else:
415 for i in range(len(self.tasks)):
685 for i in range(len(self.tasks)):
416 taskid = self.tasks[i].taskid
686 taskid = self.tasks[i].taskid
417 if id == taskid:
687 if id == taskid:
418 return self.tasks.pop(i)
688 return self.tasks.pop(i)
419 raise IndexError("No task #%i"%id)
689 raise IndexError("No task #%i"%id)
420
690
421 def add_worker(self, worker, **flags):
691 def add_worker(self, worker, **flags):
422 self.workers.append(worker)
692 self.workers.append(worker)
423
693
424 def pop_worker(self, id=None):
694 def pop_worker(self, id=None):
425 if id is None:
695 if id is None:
426 return self.workers.pop(0)
696 return self.workers.pop(0)
427 else:
697 else:
428 for i in range(len(self.workers)):
698 for i in range(len(self.workers)):
429 workerid = self.workers[i].workerid
699 workerid = self.workers[i].workerid
430 if id == workerid:
700 if id == workerid:
431 return self.workers.pop(i)
701 return self.workers.pop(i)
432 raise IndexError("No worker #%i"%id)
702 raise IndexError("No worker #%i"%id)
433
703
434 def schedule(self):
704 def schedule(self):
435 for t in self.tasks:
705 for t in self.tasks:
436 for w in self.workers:
706 for w in self.workers:
437 try:# do not allow exceptions to break this
707 try:# do not allow exceptions to break this
438 cando = t.depend is None or t.depend(w.properties)
708 # Allow the task to check itself using its
709 # check_depend method.
710 cando = t.check_depend(w.properties)
439 except:
711 except:
440 cando = False
712 cando = False
441 if cando:
713 if cando:
442 return self.pop_worker(w.workerid), self.pop_task(t.taskid)
714 return self.pop_worker(w.workerid), self.pop_task(t.taskid)
443 return None, None
715 return None, None
444
716
445
717
446
718
447 class LIFOScheduler(FIFOScheduler):
719 class LIFOScheduler(FIFOScheduler):
448 """A Last-In-First-Out (Stack) Scheduler. This scheduler should naively
720 """
449 reward fast engines by giving them more jobs. This risks starvation, but
721 A Last-In-First-Out (Stack) Scheduler.
450 only in cases with low load, where starvation does not really matter.
722
723 This scheduler should naively reward fast engines by giving
724 them more jobs. This risks starvation, but only in cases with
725 low load, where starvation does not really matter.
451 """
726 """
452
727
453 def add_task(self, task, **flags):
728 def add_task(self, task, **flags):
454 # self.tasks.reverse()
729 # self.tasks.reverse()
455 self.tasks.insert(0, task)
730 self.tasks.insert(0, task)
456 # self.tasks.reverse()
731 # self.tasks.reverse()
457
732
458 def add_worker(self, worker, **flags):
733 def add_worker(self, worker, **flags):
459 # self.workers.reverse()
734 # self.workers.reverse()
460 self.workers.insert(0, worker)
735 self.workers.insert(0, worker)
461 # self.workers.reverse()
736 # self.workers.reverse()
462
737
463
738
464 class ITaskController(cs.IControllerBase):
739 class ITaskController(cs.IControllerBase):
465 """The Task based interface to a `ControllerService` object
740 """
741 The Task based interface to a `ControllerService` object
466
742
467 This adapts a `ControllerService` to the ITaskController interface.
743 This adapts a `ControllerService` to the ITaskController interface.
468 """
744 """
469
745
470 def run(task):
746 def run(task):
471 """Run a task.
747 """
748 Run a task.
472
749
473 :Parameters:
750 :Parameters:
474 task : an IPython `Task` object
751 task : an IPython `Task` object
475
752
476 :Returns: the integer ID of the task
753 :Returns: the integer ID of the task
477 """
754 """
478
755
479 def get_task_result(taskid, block=False):
756 def get_task_result(taskid, block=False):
480 """Get the result of a task by its ID.
757 """
758 Get the result of a task by its ID.
481
759
482 :Parameters:
760 :Parameters:
483 taskid : int
761 taskid : int
484 the id of the task whose result is requested
762 the id of the task whose result is requested
485
763
486 :Returns: `Deferred` to (taskid, actualResult) if the task is done, and None
764 :Returns: `Deferred` to the task result if the task is done, and None
487 if not.
765 if not.
488
766
489 :Exceptions:
767 :Exceptions:
490 actualResult will be an `IndexError` if no such task has been submitted
768 actualResult will be an `IndexError` if no such task has been submitted
491 """
769 """
492
770
493 def abort(taskid):
771 def abort(taskid):
494 """Remove task from queue if task is has not been submitted.
772 """Remove task from queue if task is has not been submitted.
495
773
496 If the task has already been submitted, wait for it to finish and discard
774 If the task has already been submitted, wait for it to finish and discard
497 results and prevent resubmission.
775 results and prevent resubmission.
498
776
499 :Parameters:
777 :Parameters:
500 taskid : the id of the task to be aborted
778 taskid : the id of the task to be aborted
501
779
502 :Returns:
780 :Returns:
503 `Deferred` to abort attempt completion. Will be None on success.
781 `Deferred` to abort attempt completion. Will be None on success.
504
782
505 :Exceptions:
783 :Exceptions:
506 deferred will fail with `IndexError` if no such task has been submitted
784 deferred will fail with `IndexError` if no such task has been submitted
507 or the task has already completed.
785 or the task has already completed.
508 """
786 """
509
787
510 def barrier(taskids):
788 def barrier(taskids):
511 """Block until the list of taskids are completed.
789 """
790 Block until the list of taskids are completed.
512
791
513 Returns None on success.
792 Returns None on success.
514 """
793 """
515
794
516 def spin():
795 def spin():
517 """touch the scheduler, to resume scheduling without submitting
796 """
518 a task.
797 Touch the scheduler, to resume scheduling without submitting a task.
519 """
798 """
520
799
521 def queue_status(self, verbose=False):
800 def queue_status(verbose=False):
522 """Get a dictionary with the current state of the task queue.
801 """
802 Get a dictionary with the current state of the task queue.
523
803
524 If verbose is True, then return lists of taskids, otherwise,
804 If verbose is True, then return lists of taskids, otherwise,
525 return the number of tasks with each status.
805 return the number of tasks with each status.
526 """
806 """
527
807
808 def clear():
809 """
810 Clear all previously run tasks from the task controller.
811
812 This is needed because the task controller keeps all task results
813 in memory. This can be a problem if there are many completed
814 tasks. Users should call this periodically to clean out these
815 cached task results.
816 """
817
528
818
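On the controller side the interface above is Deferred-based throughout. A schematic sketch of submitting a task and collecting its result; task_controller is assumed to be an ITaskController implementer such as the TaskController defined below.

def on_taskid(taskid):
    # run() fires with the integer task id once the task is queued.
    return task_controller.get_task_result(taskid, block=True)

def on_result(task_result):
    print task_result

d = task_controller.run(StringTask('a = 5', pull='a'))
d.addCallback(on_taskid)
d.addCallback(on_result)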
529 class TaskController(cs.ControllerAdapterBase):
819 class TaskController(cs.ControllerAdapterBase):
530 """The Task based interface to a Controller object.
820 """The Task based interface to a Controller object.
531
821
532 If you want to use a different scheduler, just subclass this and set
822 If you want to use a different scheduler, just subclass this and set
533 the `SchedulerClass` member to the *class* of your chosen scheduler.
823 the `SchedulerClass` member to the *class* of your chosen scheduler.
534 """
824 """
535
825
536 zi.implements(ITaskController)
826 zi.implements(ITaskController)
537 SchedulerClass = FIFOScheduler
827 SchedulerClass = FIFOScheduler
538
828
539 timeout = 30
829 timeout = 30
540
830
541 def __init__(self, controller):
831 def __init__(self, controller):
542 self.controller = controller
832 self.controller = controller
543 self.controller.on_register_engine_do(self.registerWorker, True)
833 self.controller.on_register_engine_do(self.registerWorker, True)
544 self.controller.on_unregister_engine_do(self.unregisterWorker, True)
834 self.controller.on_unregister_engine_do(self.unregisterWorker, True)
545 self.taskid = 0
835 self.taskid = 0
546 self.failurePenalty = 1 # the time in seconds to penalize
836 self.failurePenalty = 1 # the time in seconds to penalize
547 # a worker for failing a task
837 # a worker for failing a task
548 self.pendingTasks = {} # dict of {workerid:(taskid, task)}
838 self.pendingTasks = {} # dict of {workerid:(taskid, task)}
549 self.deferredResults = {} # dict of {taskid:deferred}
839 self.deferredResults = {} # dict of {taskid:deferred}
550 self.finishedResults = {} # dict of {taskid:actualResult}
840 self.finishedResults = {} # dict of {taskid:actualResult}
551 self.workers = {} # dict of {workerid:worker}
841 self.workers = {} # dict of {workerid:worker}
552 self.abortPending = [] # list of taskids with pending aborts
842 self.abortPending = [] # list of taskids with pending aborts
553 self.idleLater = None # delayed call object for timeout
843 self.idleLater = None # delayed call object for timeout
554 self.scheduler = self.SchedulerClass()
844 self.scheduler = self.SchedulerClass()
555
845
556 for id in self.controller.engines.keys():
846 for id in self.controller.engines.keys():
557 self.workers[id] = IWorker(self.controller.engines[id])
847 self.workers[id] = IWorker(self.controller.engines[id])
558 self.workers[id].workerid = id
848 self.workers[id].workerid = id
559 self.scheduler.add_worker(self.workers[id])
849 self.scheduler.add_worker(self.workers[id])
560
850
561 def registerWorker(self, id):
851 def registerWorker(self, id):
562 """Called by controller.register_engine."""
852 """Called by controller.register_engine."""
563 if self.workers.get(id):
853 if self.workers.get(id):
564 raise "We already have one! This should not happen."
854 raise ValueError("worker with id %s already exists. This should not happen." % id)
565 self.workers[id] = IWorker(self.controller.engines[id])
855 self.workers[id] = IWorker(self.controller.engines[id])
566 self.workers[id].workerid = id
856 self.workers[id].workerid = id
567 if not self.pendingTasks.has_key(id):# if not working
857 if not self.pendingTasks.has_key(id):# if not working
568 self.scheduler.add_worker(self.workers[id])
858 self.scheduler.add_worker(self.workers[id])
569 self.distributeTasks()
859 self.distributeTasks()
570
860
571 def unregisterWorker(self, id):
861 def unregisterWorker(self, id):
572 """Called by controller.unregister_engine"""
862 """Called by controller.unregister_engine"""
573
863
574 if self.workers.has_key(id):
864 if self.workers.has_key(id):
575 try:
865 try:
576 self.scheduler.pop_worker(id)
866 self.scheduler.pop_worker(id)
577 except IndexError:
867 except IndexError:
578 pass
868 pass
579 self.workers.pop(id)
869 self.workers.pop(id)
580
870
581 def _pendingTaskIDs(self):
871 def _pendingTaskIDs(self):
582 return [t.taskid for t in self.pendingTasks.values()]
872 return [t.taskid for t in self.pendingTasks.values()]
583
873
584 #---------------------------------------------------------------------------
874 #---------------------------------------------------------------------------
585 # Interface methods
875 # Interface methods
586 #---------------------------------------------------------------------------
876 #---------------------------------------------------------------------------
587
877
588 def run(self, task):
878 def run(self, task):
589 """Run a task and return `Deferred` to its taskid."""
879 """
880 Run a task and return `Deferred` to its taskid.
881 """
590 task.taskid = self.taskid
882 task.taskid = self.taskid
591 task.start = time.localtime()
883 task.start = time.localtime()
592 self.taskid += 1
884 self.taskid += 1
593 d = defer.Deferred()
885 d = defer.Deferred()
594 self.scheduler.add_task(task)
886 self.scheduler.add_task(task)
595 # log.msg('Queuing task: %i' % task.taskid)
887 log.msg('Queuing task: %i' % task.taskid)
596
888
597 self.deferredResults[task.taskid] = []
889 self.deferredResults[task.taskid] = []
598 self.distributeTasks()
890 self.distributeTasks()
599 return defer.succeed(task.taskid)
891 return defer.succeed(task.taskid)
600
892
601 def get_task_result(self, taskid, block=False):
893 def get_task_result(self, taskid, block=False):
602 """Returns a `Deferred` to a TaskResult tuple or None."""
894 """
603 # log.msg("Getting task result: %i" % taskid)
895 Returns a `Deferred` to the task result, or None.
896 """
897 log.msg("Getting task result: %i" % taskid)
604 if self.finishedResults.has_key(taskid):
898 if self.finishedResults.has_key(taskid):
605 tr = self.finishedResults[taskid]
899 tr = self.finishedResults[taskid]
606 return defer.succeed(tr)
900 return defer.succeed(tr)
607 elif self.deferredResults.has_key(taskid):
901 elif self.deferredResults.has_key(taskid):
608 if block:
902 if block:
609 d = defer.Deferred()
903 d = defer.Deferred()
610 self.deferredResults[taskid].append(d)
904 self.deferredResults[taskid].append(d)
611 return d
905 return d
612 else:
906 else:
613 return defer.succeed(None)
907 return defer.succeed(None)
614 else:
908 else:
615 return defer.fail(IndexError("task ID not registered: %r" % taskid))
909 return defer.fail(IndexError("task ID not registered: %r" % taskid))
616
910
617 def abort(self, taskid):
911 def abort(self, taskid):
618 """Remove a task from the queue if it has not been run already."""
912 """
913 Remove a task from the queue if it has not been run already.
914 """
619 if not isinstance(taskid, int):
915 if not isinstance(taskid, int):
620 return defer.fail(failure.Failure(TypeError("an integer task id expected: %r" % taskid)))
916 return defer.fail(failure.Failure(TypeError("an integer task id expected: %r" % taskid)))
621 try:
917 try:
622 self.scheduler.pop_task(taskid)
918 self.scheduler.pop_task(taskid)
623 except IndexError, e:
919 except IndexError, e:
624 if taskid in self.finishedResults.keys():
920 if taskid in self.finishedResults.keys():
625 d = defer.fail(IndexError("Task Already Completed"))
921 d = defer.fail(IndexError("Task Already Completed"))
626 elif taskid in self.abortPending:
922 elif taskid in self.abortPending:
627 d = defer.fail(IndexError("Task Already Aborted"))
923 d = defer.fail(IndexError("Task Already Aborted"))
628 elif taskid in self._pendingTaskIDs():# task is pending
924 elif taskid in self._pendingTaskIDs():# task is pending
629 self.abortPending.append(taskid)
925 self.abortPending.append(taskid)
630 d = defer.succeed(None)
926 d = defer.succeed(None)
631 else:
927 else:
632 d = defer.fail(e)
928 d = defer.fail(e)
633 else:
929 else:
634 d = defer.execute(self._doAbort, taskid)
930 d = defer.execute(self._doAbort, taskid)
635
931
636 return d
932 return d
637
933
638 def barrier(self, taskids):
934 def barrier(self, taskids):
639 dList = []
935 dList = []
640 if isinstance(taskids, int):
936 if isinstance(taskids, int):
641 taskids = [taskids]
937 taskids = [taskids]
642 for id in taskids:
938 for id in taskids:
643 d = self.get_task_result(id, block=True)
939 d = self.get_task_result(id, block=True)
644 dList.append(d)
940 dList.append(d)
645 d = DeferredList(dList, consumeErrors=1)
941 d = DeferredList(dList, consumeErrors=1)
646 d.addCallbacks(lambda r: None)
942 d.addCallbacks(lambda r: None)
647 return d
943 return d
648
944
649 def spin(self):
945 def spin(self):
650 return defer.succeed(self.distributeTasks())
946 return defer.succeed(self.distributeTasks())
651
947
652 def queue_status(self, verbose=False):
948 def queue_status(self, verbose=False):
653 pending = self._pendingTaskIDs()
949 pending = self._pendingTaskIDs()
654 failed = []
950 failed = []
655 succeeded = []
951 succeeded = []
656 for k,v in self.finishedResults.iteritems():
952 for k,v in self.finishedResults.iteritems():
657 if not isinstance(v, failure.Failure):
953 if not isinstance(v, failure.Failure):
658 if hasattr(v,'failure'):
954 if hasattr(v,'failure'):
659 if v.failure is None:
955 if v.failure is None:
660 succeeded.append(k)
956 succeeded.append(k)
661 else:
957 else:
662 failed.append(k)
958 failed.append(k)
663 scheduled = self.scheduler.taskids
959 scheduled = self.scheduler.taskids
664 if verbose:
960 if verbose:
665 result = dict(pending=pending, failed=failed,
961 result = dict(pending=pending, failed=failed,
666 succeeded=succeeded, scheduled=scheduled)
962 succeeded=succeeded, scheduled=scheduled)
667 else:
963 else:
668 result = dict(pending=len(pending),failed=len(failed),
964 result = dict(pending=len(pending),failed=len(failed),
669 succeeded=len(succeeded),scheduled=len(scheduled))
965 succeeded=len(succeeded),scheduled=len(scheduled))
670 return defer.succeed(result)
966 return defer.succeed(result)
671
967
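`run`, `get_task_result`, `abort`, `barrier`, and `queue_status` form the controller's asynchronous interface; every call returns a Deferred. A minimal sketch of driving it from Twisted code, assuming `task_controller` is an `ITaskController` implementer and that `StringTask` takes the expression/push/pull arguments documented elsewhere in this changeset:

    from IPython.kernel import task as taskmodule

    def submit_and_collect(task_controller):
        # Hypothetical task; see the StringTask documentation for the real signature.
        t = taskmodule.StringTask('b = 2*a', push=dict(a=10), pull='b')
        d = task_controller.run(t)   # fires immediately with the integer taskid
        # With block=True, the chained Deferred fires only once the task has finished.
        d.addCallback(lambda tid: task_controller.get_task_result(tid, block=True))
        return d

`queue_status()` fires with a dict keyed by pending/failed/succeeded/scheduled, holding counts by default or lists of taskids when verbose=True.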
672 #---------------------------------------------------------------------------
968 #---------------------------------------------------------------------------
673 # Queue methods
969 # Queue methods
674 #---------------------------------------------------------------------------
970 #---------------------------------------------------------------------------
675
971
676 def _doAbort(self, taskid):
972 def _doAbort(self, taskid):
677 """Helper function for aborting a pending task."""
973 """
678 # log.msg("Task aborted: %i" % taskid)
974 Helper function for aborting a pending task.
975 """
976 log.msg("Task aborted: %i" % taskid)
679 result = failure.Failure(error.TaskAborted())
977 result = failure.Failure(error.TaskAborted())
680 self._finishTask(taskid, result)
978 self._finishTask(taskid, result)
681 if taskid in self.abortPending:
979 if taskid in self.abortPending:
682 self.abortPending.remove(taskid)
980 self.abortPending.remove(taskid)
683
981
684 def _finishTask(self, taskid, result):
982 def _finishTask(self, taskid, result):
685 dlist = self.deferredResults.pop(taskid)
983 dlist = self.deferredResults.pop(taskid)
686 result.taskid = taskid # The TaskResult should save the taskid
984 # result.taskid = taskid # The TaskResult should save the taskid
687 self.finishedResults[taskid] = result
985 self.finishedResults[taskid] = result
688 for d in dlist:
986 for d in dlist:
689 d.callback(result)
987 d.callback(result)
690
988
691 def distributeTasks(self):
989 def distributeTasks(self):
692 """Distribute tasks while self.scheduler has things to do."""
990 """
693 # log.msg("distributing Tasks")
991 Distribute tasks while self.scheduler has things to do.
992 """
993 log.msg("distributing Tasks")
694 worker, task = self.scheduler.schedule()
994 worker, task = self.scheduler.schedule()
695 if not worker and not task:
995 if not worker and not task:
696 if self.idleLater and self.idleLater.called:# we are inside failIdle
996 if self.idleLater and self.idleLater.called:# we are inside failIdle
697 self.idleLater = None
997 self.idleLater = None
698 else:
998 else:
699 self.checkIdle()
999 self.checkIdle()
700 return False
1000 return False
701 # else something to do:
1001 # else something to do:
702 while worker and task:
1002 while worker and task:
703 # get worker and task
1003 # get worker and task
704 # add to pending
1004 # add to pending
705 self.pendingTasks[worker.workerid] = task
1005 self.pendingTasks[worker.workerid] = task
706 # run/link callbacks
1006 # run/link callbacks
707 d = worker.run(task)
1007 d = worker.run(task)
708 # log.msg("Running task %i on worker %i" %(task.taskid, worker.workerid))
1008 log.msg("Running task %i on worker %i" %(task.taskid, worker.workerid))
709 d.addBoth(self.taskCompleted, task.taskid, worker.workerid)
1009 d.addBoth(self.taskCompleted, task.taskid, worker.workerid)
710 worker, task = self.scheduler.schedule()
1010 worker, task = self.scheduler.schedule()
711 # check for idle timeout:
1011 # check for idle timeout:
712 self.checkIdle()
1012 self.checkIdle()
713 return True
1013 return True
714
1014
715 def checkIdle(self):
1015 def checkIdle(self):
716 if self.idleLater and not self.idleLater.called:
1016 if self.idleLater and not self.idleLater.called:
717 self.idleLater.cancel()
1017 self.idleLater.cancel()
718 if self.scheduler.ntasks and self.workers and \
1018 if self.scheduler.ntasks and self.workers and \
719 self.scheduler.nworkers == len(self.workers):
1019 self.scheduler.nworkers == len(self.workers):
720 self.idleLater = reactor.callLater(self.timeout, self.failIdle)
1020 self.idleLater = reactor.callLater(self.timeout, self.failIdle)
721 else:
1021 else:
722 self.idleLater = None
1022 self.idleLater = None
723
1023
724 def failIdle(self):
1024 def failIdle(self):
725 if not self.distributeTasks():
1025 if not self.distributeTasks():
726 while self.scheduler.ntasks:
1026 while self.scheduler.ntasks:
727 t = self.scheduler.pop_task()
1027 t = self.scheduler.pop_task()
728 msg = "task %i failed to execute due to unmet dependencies"%t.taskid
1028 msg = "task %i failed to execute due to unmet dependencies"%t.taskid
729 msg += " for %i seconds"%self.timeout
1029 msg += " for %i seconds"%self.timeout
730 # log.msg("Task aborted by timeout: %i" % t.taskid)
1030 log.msg("Task aborted by timeout: %i" % t.taskid)
731 f = failure.Failure(error.TaskTimeout(msg))
1031 f = failure.Failure(error.TaskTimeout(msg))
732 self._finishTask(t.taskid, f)
1032 self._finishTask(t.taskid, f)
733 self.idleLater = None
1033 self.idleLater = None
734
1034
735
1035
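checkIdle arms a timer whenever tasks are queued but every registered worker is idle; if nothing becomes schedulable within `self.timeout` seconds, failIdle rejects the queued tasks with `error.TaskTimeout`. This is what a task with an unsatisfiable dependency eventually runs into. Continuing the sketch above (same `taskmodule` import and `task_controller`), and noting that the `depend` callable and its properties argument are assumptions based on the task documentation rather than code shown in this hunk:

    def needs_gpu(properties):
        # Assumed semantics: depend receives an engine's properties dict and
        # must return True for that engine to be eligible.
        return properties.get('gpu', False)

    t = taskmodule.StringTask('r = render()', pull='r', depend=needs_gpu)
    task_controller.run(t)
    # If no engine ever satisfies needs_gpu, failIdle() eventually fails the
    # task with a TaskTimeout naming the unmet dependencies and the timeout.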
736 def taskCompleted(self, result, taskid, workerid):
1036 def taskCompleted(self, success_and_result, taskid, workerid):
737 """This is the err/callback for a completed task."""
1037 """This is the err/callback for a completed task."""
1038 success, result = success_and_result
738 try:
1039 try:
739 task = self.pendingTasks.pop(workerid)
1040 task = self.pendingTasks.pop(workerid)
740 except:
1041 except:
741 # this should not happen
1042 # this should not happen
742 log.msg("Tried to pop bad pending task %i from worker %i"%(taskid, workerid))
1043 log.msg("Tried to pop bad pending task %i from worker %i"%(taskid, workerid))
743 log.msg("Result: %r"%result)
1044 log.msg("Result: %r"%result)
744 log.msg("Pending tasks: %s"%self.pendingTasks)
1045 log.msg("Pending tasks: %s"%self.pendingTasks)
745 return
1046 return
746
1047
747 # Check if aborted while pending
1048 # Check if aborted while pending
748 aborted = False
1049 aborted = False
749 if taskid in self.abortPending:
1050 if taskid in self.abortPending:
750 self._doAbort(taskid)
1051 self._doAbort(taskid)
751 aborted = True
1052 aborted = True
752
1053
753 if not aborted:
1054 if not aborted:
754 if result.failure is not None and isinstance(result.failure, failure.Failure): # we failed
1055 if not success:
755 log.msg("Task %i failed on worker %i"% (taskid, workerid))
1056 log.msg("Task %i failed on worker %i"% (taskid, workerid))
756 if task.retries > 0: # resubmit
1057 if task.retries > 0: # resubmit
757 task.retries -= 1
1058 task.retries -= 1
758 self.scheduler.add_task(task)
1059 self.scheduler.add_task(task)
759 s = "Resubmitting task %i, %i retries remaining" %(taskid, task.retries)
1060 s = "Resubmitting task %i, %i retries remaining" %(taskid, task.retries)
760 log.msg(s)
1061 log.msg(s)
761 self.distributeTasks()
1062 self.distributeTasks()
762 elif isinstance(task.recovery_task, Task) and \
1063 elif isinstance(task.recovery_task, BaseTask) and \
763 task.recovery_task.retries > -1:
1064 task.recovery_task.retries > -1:
764 # retries = -1 is to prevent infinite recovery_task loop
1065 # retries = -1 is to prevent infinite recovery_task loop
765 task.retries = -1
1066 task.retries = -1
766 task.recovery_task.taskid = taskid
1067 task.recovery_task.taskid = taskid
767 task = task.recovery_task
1068 task = task.recovery_task
768 self.scheduler.add_task(task)
1069 self.scheduler.add_task(task)
769 s = "Recovering task %i, %i retries remaining" %(taskid, task.retries)
1070 s = "Recovering task %i, %i retries remaining" %(taskid, task.retries)
770 log.msg(s)
1071 log.msg(s)
771 self.distributeTasks()
1072 self.distributeTasks()
772 else: # done trying
1073 else: # done trying
773 self._finishTask(taskid, result)
1074 self._finishTask(taskid, result)
774 # wait a second before readmitting a worker that failed
1075 # wait a second before readmitting a worker that failed
775 # it may have died, and not yet been unregistered
1076 # it may have died, and not yet been unregistered
776 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
1077 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
777 else: # we succeeded
1078 else: # we succeeded
778 # log.msg("Task completed: %i"% taskid)
1079 log.msg("Task completed: %i"% taskid)
779 self._finishTask(taskid, result)
1080 self._finishTask(taskid, result)
780 self.readmitWorker(workerid)
1081 self.readmitWorker(workerid)
781 else:# we aborted the task
1082 else: # we aborted the task
782 if result.failure is not None and isinstance(result.failure, failure.Failure): # it failed, penalize worker
1083 if not success:
783 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
1084 reactor.callLater(self.failurePenalty, self.readmitWorker, workerid)
784 else:
1085 else:
785 self.readmitWorker(workerid)
1086 self.readmitWorker(workerid)
786
1087
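The failure branch of taskCompleted gives a task several chances before giving up: it is resubmitted while `retries` remain, then handed to its `recovery_task` once (whose retries are forced to -1 to break recovery loops), and only then is the stored failure passed to _finishTask, with the failing worker readmitted after `failurePenalty` seconds. A sketch of a task that exercises this path, continuing the sketch above and again assuming the StringTask keyword arguments documented elsewhere in this changeset:

    fallback = taskmodule.StringTask('result = cheap_estimate()', pull='result')
    flaky = taskmodule.StringTask('result = flaky_computation()', pull='result',
                                  retries=3, recovery_task=fallback)
    task_controller.run(flaky)
    # Worst case: four attempts of `flaky`, one attempt of `fallback`, and then
    # the failure is recorded under the original taskid.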
787 def readmitWorker(self, workerid):
1088 def readmitWorker(self, workerid):
788 """Readmit a worker to the scheduler.
1089 """
1090 Readmit a worker to the scheduler.
789
1091
790 This is outside `taskCompleted` because of the `failurePenalty` being
1092 This is outside `taskCompleted` because of the `failurePenalty` being
791 implemented through `reactor.callLater`.
1093 implemented through `reactor.callLater`.
792 """
1094 """
793
1095
794 if workerid in self.workers.keys() and workerid not in self.pendingTasks.keys():
1096 if workerid in self.workers.keys() and workerid not in self.pendingTasks.keys():
795 self.scheduler.add_worker(self.workers[workerid])
1097 self.scheduler.add_worker(self.workers[workerid])
796 self.distributeTasks()
1098 self.distributeTasks()
797
1099
1100 def clear(self):
1101 """
1102 Clear all previously run tasks from the task controller.
1103
1104 This is needed because the task controller keeps all task results
1105 in memory. This can be a problem if there are many completed
1106 tasks. Users should call this periodically to clean out these
1107 cached task results.
1108 """
1109 self.finishedResults = {}
1110 return defer.succeed(None)
1111
798
1112
799 components.registerAdapter(TaskController, cs.IControllerBase, ITaskController)
1113 components.registerAdapter(TaskController, cs.IControllerBase, ITaskController)
@@ -1,161 +1,180 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_taskcontrollerxmlrpc -*-
2 # -*- test-case-name: IPython.kernel.tests.test_taskcontrollerxmlrpc -*-
3
3
4 """The Generic Task Client object.
4 """
5
5 A blocking version of the task client.
6 This must be subclassed based on your connection method.
7 """
6 """
8
7
9 __docformat__ = "restructuredtext en"
8 __docformat__ = "restructuredtext en"
10
9
11 #-------------------------------------------------------------------------------
10 #-------------------------------------------------------------------------------
12 # Copyright (C) 2008 The IPython Development Team
11 # Copyright (C) 2008 The IPython Development Team
13 #
12 #
14 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
15 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
16 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
17
16
18 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
19 # Imports
18 # Imports
20 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
21
20
22 from zope.interface import Interface, implements
21 from zope.interface import Interface, implements
23 from twisted.python import components, log
22 from twisted.python import components, log
24
23
25 from IPython.kernel.twistedutil import blockingCallFromThread
24 from IPython.kernel.twistedutil import blockingCallFromThread
26 from IPython.kernel import task, error
25 from IPython.kernel import task, error
26 from IPython.kernel.mapper import (
27 SynchronousTaskMapper,
28 ITaskMapperFactory,
29 IMapper
30 )
31 from IPython.kernel.parallelfunction import (
32 ParallelFunction,
33 ITaskParallelDecorator
34 )
27
35
28 #-------------------------------------------------------------------------------
36 #-------------------------------------------------------------------------------
29 # Connecting Task Client
37 # The task client
30 #-------------------------------------------------------------------------------
38 #-------------------------------------------------------------------------------
31
39
32 class InteractiveTaskClient(object):
33
34 def irun(self, *args, **kwargs):
35 """Run a task on the `TaskController`.
36
37 This method is a shorthand for run(task) and its arguments are simply
38 passed onto a `Task` object:
39
40 irun(*args, **kwargs) -> run(Task(*args, **kwargs))
41
42 :Parameters:
43 expression : str
44 A str that is valid python code that is the task.
45 pull : str or list of str
46 The names of objects to be pulled as results.
47 push : dict
48 A dict of objects to be pushed into the engines namespace before
49 execution of the expression.
50 clear_before : boolean
51 Should the engine's namespace be cleared before the task is run.
52 Default=False.
53 clear_after : boolean
54 Should the engine's namespace be cleared after the task is run.
55 Default=False.
56 retries : int
57 The number of times to resumbit the task if it fails. Default=0.
58 options : dict
59 Any other keyword options for more elaborate uses of tasks
60
61 :Returns: A `TaskResult` object.
62 """
63 block = kwargs.pop('block', False)
64 if len(args) == 1 and isinstance(args[0], task.Task):
65 t = args[0]
66 else:
67 t = task.Task(*args, **kwargs)
68 taskid = self.run(t)
69 print "TaskID = %i"%taskid
70 if block:
71 return self.get_task_result(taskid, block)
72 else:
73 return taskid
74
75 class IBlockingTaskClient(Interface):
40 class IBlockingTaskClient(Interface):
76 """
41 """
77 An interface for blocking task clients.
42 A vague interface of the blocking task client
78 """
43 """
79 pass
44 pass
80
45
81
46 class BlockingTaskClient(object):
82 class BlockingTaskClient(InteractiveTaskClient):
83 """
47 """
84 This class provides a blocking task client.
48 A blocking task client that adapts a non-blocking one.
85 """
49 """
86
50
87 implements(IBlockingTaskClient)
51 implements(
52 IBlockingTaskClient,
53 ITaskMapperFactory,
54 IMapper,
55 ITaskParallelDecorator
56 )
88
57
89 def __init__(self, task_controller):
58 def __init__(self, task_controller):
90 self.task_controller = task_controller
59 self.task_controller = task_controller
91 self.block = True
60 self.block = True
92
61
93 def run(self, task):
62 def run(self, task, block=False):
94 """
63 """Run a task on the `TaskController`.
95 Run a task and return a task id that can be used to get the task result.
64
65 See the documentation of the `MapTask` and `StringTask` classes for
66 details on how to build tasks of different types.
96
67
97 :Parameters:
68 :Parameters:
98 task : `Task`
69 task : an `ITask` implementer
99 The `Task` object to run
70
71 :Returns: The int taskid of the submitted task. Pass this to
72 `get_task_result` to get the `TaskResult` object.
100 """
73 """
101 return blockingCallFromThread(self.task_controller.run, task)
74 tid = blockingCallFromThread(self.task_controller.run, task)
75 if block:
76 return self.get_task_result(tid, block=True)
77 else:
78 return tid
102
79
103 def get_task_result(self, taskid, block=False):
80 def get_task_result(self, taskid, block=False):
104 """
81 """
105 Get or poll for a task result.
82 Get a task result by taskid.
106
83
107 :Parameters:
84 :Parameters:
108 taskid : int
85 taskid : int
109 The id of the task whose result to get
86 The taskid of the task to be retrieved.
110 block : boolean
87 block : boolean
111 If True, wait until the task is done and then result the
88 Should I block until the task is done?
112 `TaskResult` object. If False, just poll for the result and
89
113 return None if the task is not done.
90 :Returns: A `TaskResult` object that encapsulates the task result.
114 """
91 """
115 return blockingCallFromThread(self.task_controller.get_task_result,
92 return blockingCallFromThread(self.task_controller.get_task_result,
116 taskid, block)
93 taskid, block)
117
94
118 def abort(self, taskid):
95 def abort(self, taskid):
119 """
96 """
120 Abort a task by task id if it has not been started.
97 Abort a task by taskid.
98
99 :Parameters:
100 taskid : int
101 The taskid of the task to be aborted.
121 """
102 """
122 return blockingCallFromThread(self.task_controller.abort, taskid)
103 return blockingCallFromThread(self.task_controller.abort, taskid)
123
104
124 def barrier(self, taskids):
105 def barrier(self, taskids):
125 """
106 """Block until a set of tasks are completed.
126 Wait for a set of tasks to finish.
127
107
128 :Parameters:
108 :Parameters:
129 taskids : list of ints
109 taskids : list, tuple
130 A list of task ids to wait for.
110 A sequence of taskids to block on.
131 """
111 """
132 return blockingCallFromThread(self.task_controller.barrier, taskids)
112 return blockingCallFromThread(self.task_controller.barrier, taskids)
133
113
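Because each method funnels through blockingCallFromThread, this class turns the Deferred-based controller API into ordinary synchronous calls. A sketch, assuming `async_client` is the non-blocking task client being adapted (the adapter is registered at the bottom of this file) and the StringTask signature documented elsewhere in this changeset:

    tc = IBlockingTaskClient(async_client)    # adaptation via the registered adapter

    tid = tc.run(task.StringTask('c = a + b', push=dict(a=1, b=2), pull='c'))
    tc.barrier([tid])                         # wait for this batch to finish
    tr = tc.get_task_result(tid, block=True)  # the TaskResult for `tid`

    print tc.queue_status()   # e.g. {'succeeded': 1, 'failed': 0, 'pending': 0, 'scheduled': 0}
    tc.clear()                # drop cached results once they are no longer needed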
134 def spin(self):
114 def spin(self):
135 """
115 """
136 Cause the scheduler to schedule tasks.
116 Touch the scheduler, to resume scheduling without submitting a task.
137
117
138 This method only needs to be called in unusual situations where the
118 This method only needs to be called in unusual situations where the
139 scheduler is idle for some reason.
119 scheduler is idle for some reason.
140 """
120 """
141 return blockingCallFromThread(self.task_controller.spin)
121 return blockingCallFromThread(self.task_controller.spin)
142
122
143 def queue_status(self, verbose=False):
123 def queue_status(self, verbose=False):
144 """
124 """
145 Get a dictionary with the current state of the task queue.
125 Get a dictionary with the current state of the task queue.
146
126
147 :Parameters:
127 :Parameters:
148 verbose : boolean
128 verbose : boolean
149 If True, return a list of taskids. If False, simply give
129 If True, return a list of taskids. If False, simply give
150 the number of tasks with each status.
130 the number of tasks with each status.
151
131
152 :Returns:
132 :Returns:
153 A dict with the queue status.
133 A dict with the queue status.
154 """
134 """
155 return blockingCallFromThread(self.task_controller.queue_status, verbose)
135 return blockingCallFromThread(self.task_controller.queue_status, verbose)
156
136
137 def clear(self):
138 """
139 Clear all previously run tasks from the task controller.
140
141 This is needed because the task controller keeps all task results
142 in memory. This can be a problem if there are many completed
143 tasks. Users should call this periodically to clean out these
144 cached task results.
145 """
146 return blockingCallFromThread(self.task_controller.clear)
147
148 def map(self, func, *sequences):
149 """
150 Apply func to *sequences elementwise. Like Python's builtin map.
151
152 This version is load balanced.
153 """
154 return self.mapper().map(func, *sequences)
155
156 def mapper(self, clear_before=False, clear_after=False, retries=0,
157 recovery_task=None, depend=None, block=True):
158 """
159 Create an `IMapper` implementer with a given set of arguments.
160
161 The `IMapper` created using a task controller is load balanced.
162
163 See the documentation for `IPython.kernel.task.BaseTask` for
164 documentation on the arguments to this method.
165 """
166 return SynchronousTaskMapper(self, clear_before=clear_before,
167 clear_after=clear_after, retries=retries,
168 recovery_task=recovery_task, depend=depend, block=block)
169
170 def parallel(self, clear_before=False, clear_after=False, retries=0,
171 recovery_task=None, depend=None, block=True):
172 mapper = self.mapper(clear_before, clear_after, retries,
173 recovery_task, depend, block)
174 pf = ParallelFunction(mapper)
175 return pf
157
176
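`map`, `mapper`, and `parallel` are three entry points to the same load-balanced machinery: `map` is a one-shot call with default task options, `mapper` returns a reusable `IMapper` configured with task options, and `parallel` (above) wraps a mapper as a decorator. A sketch of the first two against a connected blocking client `tc`:

    def square(x):
        return x * x

    results = tc.map(square, range(32))    # one-shot, load-balanced map

    m = tc.mapper(retries=2)               # reusable mapper with custom task options
    results = m.map(square, range(32))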
158 components.registerAdapter(BlockingTaskClient,
177 components.registerAdapter(BlockingTaskClient,
159 task.ITaskController, IBlockingTaskClient)
178 task.ITaskController, IBlockingTaskClient)
160
179
161
180
@@ -1,267 +1,329 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 # -*- test-case-name: IPython.kernel.tests.test_taskxmlrpc -*-
2 # -*- test-case-name: IPython.kernel.tests.test_taskxmlrpc -*-
3 """A Foolscap interface to a TaskController.
3 """A Foolscap interface to a TaskController.
4
4
5 This class lets Foolscap clients talk to a TaskController.
5 This class lets Foolscap clients talk to a TaskController.
6 """
6 """
7
7
8 __docformat__ = "restructuredtext en"
8 __docformat__ = "restructuredtext en"
9
9
10 #-------------------------------------------------------------------------------
10 #-------------------------------------------------------------------------------
11 # Copyright (C) 2008 The IPython Development Team
11 # Copyright (C) 2008 The IPython Development Team
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #-------------------------------------------------------------------------------
15 #-------------------------------------------------------------------------------
16
16
17 #-------------------------------------------------------------------------------
17 #-------------------------------------------------------------------------------
18 # Imports
18 # Imports
19 #-------------------------------------------------------------------------------
19 #-------------------------------------------------------------------------------
20
20
21 import cPickle as pickle
21 import cPickle as pickle
22 import xmlrpclib, copy
22 import xmlrpclib, copy
23
23
24 from zope.interface import Interface, implements
24 from zope.interface import Interface, implements
25 from twisted.internet import defer
25 from twisted.internet import defer
26 from twisted.python import components, failure
26 from twisted.python import components, failure
27
27
28 from foolscap import Referenceable
28 from foolscap import Referenceable
29
29
30 from IPython.kernel.twistedutil import blockingCallFromThread
30 from IPython.kernel.twistedutil import blockingCallFromThread
31 from IPython.kernel import error, task as taskmodule, taskclient
31 from IPython.kernel import error, task as taskmodule, taskclient
32 from IPython.kernel.pickleutil import can, uncan
32 from IPython.kernel.pickleutil import can, uncan
33 from IPython.kernel.clientinterfaces import (
33 from IPython.kernel.clientinterfaces import (
34 IFCClientInterfaceProvider,
34 IFCClientInterfaceProvider,
35 IBlockingClientAdaptor
35 IBlockingClientAdaptor
36 )
36 )
37 from IPython.kernel.mapper import (
38 TaskMapper,
39 ITaskMapperFactory,
40 IMapper
41 )
42 from IPython.kernel.parallelfunction import (
43 ParallelFunction,
44 ITaskParallelDecorator
45 )
37
46
38 #-------------------------------------------------------------------------------
47 #-------------------------------------------------------------------------------
39 # The Controller side of things
48 # The Controller side of things
40 #-------------------------------------------------------------------------------
49 #-------------------------------------------------------------------------------
41
50
42
51
43 class IFCTaskController(Interface):
52 class IFCTaskController(Interface):
44 """Foolscap interface to task controller.
53 """Foolscap interface to task controller.
45
54
46 See the documentation of ITaskController for documentation about the methods.
55 See the documentation of `ITaskController` for more information.
47 """
56 """
48 def remote_run(request, binTask):
57 def remote_run(binTask):
49 """"""
58 """"""
50
59
51 def remote_abort(request, taskid):
60 def remote_abort(taskid):
52 """"""
61 """"""
53
62
54 def remote_get_task_result(request, taskid, block=False):
63 def remote_get_task_result(taskid, block=False):
55 """"""
64 """"""
56
65
57 def remote_barrier(request, taskids):
66 def remote_barrier(taskids):
58 """"""
67 """"""
59
68
60 def remote_spin(request):
69 def remote_spin():
61 """"""
70 """"""
62
71
63 def remote_queue_status(request, verbose):
72 def remote_queue_status(verbose):
73 """"""
74
75 def remote_clear():
64 """"""
76 """"""
65
77
66
78
67 class FCTaskControllerFromTaskController(Referenceable):
79 class FCTaskControllerFromTaskController(Referenceable):
68 """XML-RPC attachmeot for controller.
80 """
81 Adapt a `TaskController` to an `IFCTaskController`
69
82
70 See IXMLRPCTaskController and ITaskController (and its children) for documentation.
83 This class is used to expose a `TaskController` over the wire using
84 the Foolscap network protocol.
71 """
85 """
86
72 implements(IFCTaskController, IFCClientInterfaceProvider)
87 implements(IFCTaskController, IFCClientInterfaceProvider)
73
88
74 def __init__(self, taskController):
89 def __init__(self, taskController):
75 self.taskController = taskController
90 self.taskController = taskController
76
91
77 #---------------------------------------------------------------------------
92 #---------------------------------------------------------------------------
78 # Non interface methods
93 # Non interface methods
79 #---------------------------------------------------------------------------
94 #---------------------------------------------------------------------------
80
95
81 def packageFailure(self, f):
96 def packageFailure(self, f):
82 f.cleanFailure()
97 f.cleanFailure()
83 return self.packageSuccess(f)
98 return self.packageSuccess(f)
84
99
85 def packageSuccess(self, obj):
100 def packageSuccess(self, obj):
86 serial = pickle.dumps(obj, 2)
101 serial = pickle.dumps(obj, 2)
87 return serial
102 return serial
88
103
89 #---------------------------------------------------------------------------
104 #---------------------------------------------------------------------------
90 # ITaskController related methods
105 # ITaskController related methods
91 #---------------------------------------------------------------------------
106 #---------------------------------------------------------------------------
92
107
93 def remote_run(self, ptask):
108 def remote_run(self, ptask):
94 try:
109 try:
95 ctask = pickle.loads(ptask)
110 task = pickle.loads(ptask)
96 task = taskmodule.uncanTask(ctask)
111 task.uncan_task()
97 except:
112 except:
98 d = defer.fail(pickle.UnpickleableError("Could not unmarshal task"))
113 d = defer.fail(pickle.UnpickleableError("Could not unmarshal task"))
99 else:
114 else:
100 d = self.taskController.run(task)
115 d = self.taskController.run(task)
101 d.addCallback(self.packageSuccess)
116 d.addCallback(self.packageSuccess)
102 d.addErrback(self.packageFailure)
117 d.addErrback(self.packageFailure)
103 return d
118 return d
104
119
105 def remote_abort(self, taskid):
120 def remote_abort(self, taskid):
106 d = self.taskController.abort(taskid)
121 d = self.taskController.abort(taskid)
107 d.addCallback(self.packageSuccess)
122 d.addCallback(self.packageSuccess)
108 d.addErrback(self.packageFailure)
123 d.addErrback(self.packageFailure)
109 return d
124 return d
110
125
111 def remote_get_task_result(self, taskid, block=False):
126 def remote_get_task_result(self, taskid, block=False):
112 d = self.taskController.get_task_result(taskid, block)
127 d = self.taskController.get_task_result(taskid, block)
113 d.addCallback(self.packageSuccess)
128 d.addCallback(self.packageSuccess)
114 d.addErrback(self.packageFailure)
129 d.addErrback(self.packageFailure)
115 return d
130 return d
116
131
117 def remote_barrier(self, taskids):
132 def remote_barrier(self, taskids):
118 d = self.taskController.barrier(taskids)
133 d = self.taskController.barrier(taskids)
119 d.addCallback(self.packageSuccess)
134 d.addCallback(self.packageSuccess)
120 d.addErrback(self.packageFailure)
135 d.addErrback(self.packageFailure)
121 return d
136 return d
122
137
123 def remote_spin(self):
138 def remote_spin(self):
124 d = self.taskController.spin()
139 d = self.taskController.spin()
125 d.addCallback(self.packageSuccess)
140 d.addCallback(self.packageSuccess)
126 d.addErrback(self.packageFailure)
141 d.addErrback(self.packageFailure)
127 return d
142 return d
128
143
129 def remote_queue_status(self, verbose):
144 def remote_queue_status(self, verbose):
130 d = self.taskController.queue_status(verbose)
145 d = self.taskController.queue_status(verbose)
131 d.addCallback(self.packageSuccess)
146 d.addCallback(self.packageSuccess)
132 d.addErrback(self.packageFailure)
147 d.addErrback(self.packageFailure)
133 return d
148 return d
134
149
150 def remote_clear(self):
151 return self.taskController.clear()
152
135 def remote_get_client_name(self):
153 def remote_get_client_name(self):
136 return 'IPython.kernel.taskfc.FCTaskClient'
154 return 'IPython.kernel.taskfc.FCTaskClient'
137
155
138 components.registerAdapter(FCTaskControllerFromTaskController,
156 components.registerAdapter(FCTaskControllerFromTaskController,
139 taskmodule.ITaskController, IFCTaskController)
157 taskmodule.ITaskController, IFCTaskController)
140
158
141
159
142 #-------------------------------------------------------------------------------
160 #-------------------------------------------------------------------------------
143 # The Client side of things
161 # The Client side of things
144 #-------------------------------------------------------------------------------
162 #-------------------------------------------------------------------------------
145
163
146 class FCTaskClient(object):
164 class FCTaskClient(object):
147 """XML-RPC based TaskController client that implements ITaskController.
165 """
166 Client class for Foolscap exposed `TaskController`.
148
167
149 :Parameters:
168 This class is an adapter that makes a `RemoteReference` to a
150 addr : (ip, port)
169 `TaskController` look like an actual `ITaskController` on the client side.
151 The ip (str) and port (int) tuple of the `TaskController`.
170
171 This class also implements `IBlockingClientAdaptor` so that clients can
172 automatically get a blocking version of this class.
152 """
173 """
153 implements(taskmodule.ITaskController, IBlockingClientAdaptor)
174
175 implements(
176 taskmodule.ITaskController,
177 IBlockingClientAdaptor,
178 ITaskMapperFactory,
179 IMapper,
180 ITaskParallelDecorator
181 )
154
182
155 def __init__(self, remote_reference):
183 def __init__(self, remote_reference):
156 self.remote_reference = remote_reference
184 self.remote_reference = remote_reference
157
185
158 #---------------------------------------------------------------------------
186 #---------------------------------------------------------------------------
159 # Non interface methods
187 # Non interface methods
160 #---------------------------------------------------------------------------
188 #---------------------------------------------------------------------------
161
189
162 def unpackage(self, r):
190 def unpackage(self, r):
163 return pickle.loads(r)
191 return pickle.loads(r)
164
192
165 #---------------------------------------------------------------------------
193 #---------------------------------------------------------------------------
166 # ITaskController related methods
194 # ITaskController related methods
167 #---------------------------------------------------------------------------
195 #---------------------------------------------------------------------------
168 def run(self, task):
196 def run(self, task):
169 """Run a task on the `TaskController`.
197 """Run a task on the `TaskController`.
170
198
199 See the documentation of the `MapTask` and `StringTask` classes for
200 details on how to build tasks of different types.
201
171 :Parameters:
202 :Parameters:
172 task : a `Task` object
203 task : an `ITask` implementer
173
174 The Task object is created using the following signature:
175
176 Task(expression, pull=None, push={}, clear_before=False,
177 clear_after=False, retries=0, **options):)
178
179 The meaning of the arguments is as follows:
180
181 :Task Parameters:
182 expression : str
183 A str that is valid python code that is the task.
184 pull : str or list of str
185 The names of objects to be pulled as results.
186 push : dict
187 A dict of objects to be pushed into the engines namespace before
188 execution of the expression.
189 clear_before : boolean
190 Should the engine's namespace be cleared before the task is run.
191 Default=False.
192 clear_after : boolean
193 Should the engine's namespace be cleared after the task is run.
194 Default=False.
195 retries : int
196 The number of times to resumbit the task if it fails. Default=0.
197 options : dict
198 Any other keyword options for more elaborate uses of tasks
199
204
200 :Returns: The int taskid of the submitted task. Pass this to
205 :Returns: The int taskid of the submitted task. Pass this to
201 `get_task_result` to get the `TaskResult` object.
206 `get_task_result` to get the `TaskResult` object.
202 """
207 """
203 assert isinstance(task, taskmodule.Task), "task must be a Task object!"
208 assert isinstance(task, taskmodule.BaseTask), "task must be a Task object!"
204 ctask = taskmodule.canTask(task) # handles arbitrary function in .depend
209 task.can_task()
205 # as well as arbitrary recovery_task chains
210 ptask = pickle.dumps(task, 2)
206 ptask = pickle.dumps(ctask, 2)
211 task.uncan_task()
207 d = self.remote_reference.callRemote('run', ptask)
212 d = self.remote_reference.callRemote('run', ptask)
208 d.addCallback(self.unpackage)
213 d.addCallback(self.unpackage)
209 return d
214 return d
210
215
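A task submitted through this client is canned, pickled, and shipped to remote_run above, which unpickles and uncans it before handing it to the real TaskController; results travel back the same way through packageSuccess and unpackage. Usage therefore mirrors `ITaskController`, except that every method returns a Deferred. A sketch, assuming `fc_client` is a connected `FCTaskClient`:

    d = fc_client.run(taskmodule.StringTask('y = x**2', push=dict(x=3), pull='y'))
    d.addCallback(lambda tid: fc_client.get_task_result(tid, block=True))

    # For synchronous use, wrap it in the blocking adapter instead:
    blocking = fc_client.adapt_to_blocking_client()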
211 def get_task_result(self, taskid, block=False):
216 def get_task_result(self, taskid, block=False):
212 """The task result by taskid.
217 """
218 Get a task result by taskid.
213
219
214 :Parameters:
220 :Parameters:
215 taskid : int
221 taskid : int
216 The taskid of the task to be retrieved.
222 The taskid of the task to be retrieved.
217 block : boolean
223 block : boolean
218 Should I block until the task is done?
224 Should I block until the task is done?
219
225
220 :Returns: A `TaskResult` object that encapsulates the task result.
226 :Returns: A `TaskResult` object that encapsulates the task result.
221 """
227 """
222 d = self.remote_reference.callRemote('get_task_result', taskid, block)
228 d = self.remote_reference.callRemote('get_task_result', taskid, block)
223 d.addCallback(self.unpackage)
229 d.addCallback(self.unpackage)
224 return d
230 return d
225
231
226 def abort(self, taskid):
232 def abort(self, taskid):
227 """Abort a task by taskid.
233 """
234 Abort a task by taskid.
228
235
229 :Parameters:
236 :Parameters:
230 taskid : int
237 taskid : int
231 The taskid of the task to be aborted.
238 The taskid of the task to be aborted.
232 block : boolean
233 Should I block until the task is aborted.
234 """
239 """
235 d = self.remote_reference.callRemote('abort', taskid)
240 d = self.remote_reference.callRemote('abort', taskid)
236 d.addCallback(self.unpackage)
241 d.addCallback(self.unpackage)
237 return d
242 return d
238
243
239 def barrier(self, taskids):
244 def barrier(self, taskids):
240 """Block until all tasks are completed.
245 """Block until a set of tasks are completed.
241
246
242 :Parameters:
247 :Parameters:
243 taskids : list, tuple
248 taskids : list, tuple
244 A sequence of taskids to block on.
249 A sequence of taskids to block on.
245 """
250 """
246 d = self.remote_reference.callRemote('barrier', taskids)
251 d = self.remote_reference.callRemote('barrier', taskids)
247 d.addCallback(self.unpackage)
252 d.addCallback(self.unpackage)
248 return d
253 return d
249
254
250 def spin(self):
255 def spin(self):
251 """touch the scheduler, to resume scheduling without submitting
256 """
252 a task.
257 Touch the scheduler, to resume scheduling without submitting a task.
258
259 This method only needs to be called in unusual situations where the
260 scheduler is idle for some reason.
253 """
261 """
254 d = self.remote_reference.callRemote('spin')
262 d = self.remote_reference.callRemote('spin')
255 d.addCallback(self.unpackage)
263 d.addCallback(self.unpackage)
256 return d
264 return d
257
265
258 def queue_status(self, verbose=False):
266 def queue_status(self, verbose=False):
259 """Return a dict with the status of the task queue."""
267 """
268 Get a dictionary with the current state of the task queue.
269
270 :Parameters:
271 verbose : boolean
272 If True, return a list of taskids. If False, simply give
273 the number of tasks with each status.
274
275 :Returns:
276 A dict with the queue status.
277 """
260 d = self.remote_reference.callRemote('queue_status', verbose)
278 d = self.remote_reference.callRemote('queue_status', verbose)
261 d.addCallback(self.unpackage)
279 d.addCallback(self.unpackage)
262 return d
280 return d
263
281
282 def clear(self):
283 """
284 Clear all previously run tasks from the task controller.
285
286 This is needed because the task controller keeps all task results
287 in memory. This can be a problem if there are many completed
288 tasks. Users should call this periodically to clean out these
289 cached task results.
290 """
291 d = self.remote_reference.callRemote('clear')
292 return d
293
264 def adapt_to_blocking_client(self):
294 def adapt_to_blocking_client(self):
295 """
296 Wrap self in a blocking version that implements `IBlockingTaskClient`.
297 """
265 from IPython.kernel.taskclient import IBlockingTaskClient
298 from IPython.kernel.taskclient import IBlockingTaskClient
266 return IBlockingTaskClient(self)
299 return IBlockingTaskClient(self)
267
300
301 def map(self, func, *sequences):
302 """
303 Apply func to *sequences elementwise. Like Python's builtin map.
304
305 This version is load balanced.
306 """
307 return self.mapper().map(func, *sequences)
308
309 def mapper(self, clear_before=False, clear_after=False, retries=0,
310 recovery_task=None, depend=None, block=True):
311 """
312 Create an `IMapper` implementer with a given set of arguments.
313
314 The `IMapper` created using a task controller is load balanced.
315
316 See the documentation for `IPython.kernel.task.BaseTask` for
317 documentation on the arguments to this method.
318 """
319 return TaskMapper(self, clear_before=clear_before,
320 clear_after=clear_after, retries=retries,
321 recovery_task=recovery_task, depend=depend, block=block)
322
323 def parallel(self, clear_before=False, clear_after=False, retries=0,
324 recovery_task=None, depend=None, block=True):
325 mapper = self.mapper(clear_before, clear_after, retries,
326 recovery_task, depend, block)
327 pf = ParallelFunction(mapper)
328 return pf
329
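`parallel` wraps a task-based mapper in a `ParallelFunction` so it can be used as a decorator. A hedged sketch, assuming `ParallelFunction` makes the decorated function map elementwise over its sequence argument (mirroring `map` above) and that `tc` is a connected task client:

    @tc.parallel(retries=1)
    def normalize(x):
        return x / 100.0

    scaled = normalize(range(1000))   # evaluated elementwise across the engines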
@@ -1,373 +1,372 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """Test template for complete engine object"""
3 """Test template for complete engine object"""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 import cPickle as pickle
18 import cPickle as pickle
19
19
20 from twisted.internet import defer, reactor
20 from twisted.internet import defer, reactor
21 from twisted.python import failure
21 from twisted.python import failure
22 from twisted.application import service
22 from twisted.application import service
23 import zope.interface as zi
23 import zope.interface as zi
24
24
25 from IPython.kernel import newserialized
25 from IPython.kernel import newserialized
26 from IPython.kernel import error
26 from IPython.kernel import error
27 from IPython.kernel.pickleutil import can, uncan
27 from IPython.kernel.pickleutil import can, uncan
28 import IPython.kernel.engineservice as es
28 import IPython.kernel.engineservice as es
29 from IPython.kernel.core.interpreter import Interpreter
29 from IPython.kernel.core.interpreter import Interpreter
30 from IPython.testing.parametric import Parametric, parametric
30 from IPython.testing.parametric import Parametric, parametric
31
31
32 #-------------------------------------------------------------------------------
32 #-------------------------------------------------------------------------------
33 # Tests
33 # Tests
34 #-------------------------------------------------------------------------------
34 #-------------------------------------------------------------------------------
35
35
36
36
37 # A sequence of valid commands run through execute
37 # A sequence of valid commands run through execute
38 validCommands = ['a=5',
38 validCommands = ['a=5',
39 'b=10',
39 'b=10',
40 'a=5; b=10; c=a+b',
40 'a=5; b=10; c=a+b',
41 'import math; 2.0*math.pi',
41 'import math; 2.0*math.pi',
42 """def f():
42 """def f():
43 result = 0.0
43 result = 0.0
44 for i in range(10):
44 for i in range(10):
45 result += i
45 result += i
46 """,
46 """,
47 'if 1<2: a=5',
47 'if 1<2: a=5',
48 """import time
48 """import time
49 time.sleep(0.1)""",
49 time.sleep(0.1)""",
50 """from math import cos;
50 """from math import cos;
51 x = 1.0*cos(0.5)""", # Semicolons lead to Discard ast nodes that should be discarded
51 x = 1.0*cos(0.5)""", # Semicolons lead to Discard ast nodes that should be discarded
52 """from sets import Set
52 """from sets import Set
53 s = Set()
53 s = Set()
54 """, # Trailing whitespace should be allowed.
54 """, # Trailing whitespace should be allowed.
55 """import math
55 """import math
56 math.cos(1.0)""", # Test a method call with a discarded return value
56 math.cos(1.0)""", # Test a method call with a discarded return value
57 """x=1.0234
57 """x=1.0234
58 a=5; b=10""", # Test an embedded semicolon
58 a=5; b=10""", # Test an embedded semicolon
59 """x=1.0234
59 """x=1.0234
60 a=5; b=10;""" # Test both an embedded and trailing semicolon
60 a=5; b=10;""" # Test both an embedded and trailing semicolon
61 ]
61 ]
62
62
63 # A sequence of commands that raise various exceptions
63 # A sequence of commands that raise various exceptions
64 invalidCommands = [('a=1/0',ZeroDivisionError),
64 invalidCommands = [('a=1/0',ZeroDivisionError),
65 ('print v',NameError),
65 ('print v',NameError),
66 ('l=[];l[0]',IndexError),
66 ('l=[];l[0]',IndexError),
67 ("d={};d['a']",KeyError),
67 ("d={};d['a']",KeyError),
68 ("assert 1==0",AssertionError),
68 ("assert 1==0",AssertionError),
69 ("import abababsdbfsbaljasdlja",ImportError),
69 ("import abababsdbfsbaljasdlja",ImportError),
70 ("raise Exception()",Exception)]
70 ("raise Exception()",Exception)]
71
71
72 def testf(x):
72 def testf(x):
73 return 2.0*x
73 return 2.0*x
74
74
75 globala = 99
75 globala = 99
76
76
77 def testg(x):
77 def testg(x):
78 return globala*x
78 return globala*x
79
79
80 class IEngineCoreTestCase(object):
80 class IEngineCoreTestCase(object):
81 """Test an IEngineCore implementer."""
81 """Test an IEngineCore implementer."""
82
82
83 def createShell(self):
83 def createShell(self):
84 return Interpreter()
84 return Interpreter()
85
85
86 def catchQueueCleared(self, f):
86 def catchQueueCleared(self, f):
87 try:
87 try:
88 f.raiseException()
88 f.raiseException()
89 except error.QueueCleared:
89 except error.QueueCleared:
90 pass
90 pass
91
91
92 def testIEngineCoreInterface(self):
92 def testIEngineCoreInterface(self):
93 """Does self.engine claim to implement IEngineCore?"""
93 """Does self.engine claim to implement IEngineCore?"""
94 self.assert_(es.IEngineCore.providedBy(self.engine))
94 self.assert_(es.IEngineCore.providedBy(self.engine))
95
95
96 def testIEngineCoreInterfaceMethods(self):
96 def testIEngineCoreInterfaceMethods(self):
97 """Does self.engine have the methods and attributes in IEngineCore."""
97 """Does self.engine have the methods and attributes in IEngineCore."""
98 for m in list(es.IEngineCore):
98 for m in list(es.IEngineCore):
99 self.assert_(hasattr(self.engine, m))
99 self.assert_(hasattr(self.engine, m))
100
100
101 def testIEngineCoreDeferreds(self):
101 def testIEngineCoreDeferreds(self):
102 d = self.engine.execute('a=5')
102 d = self.engine.execute('a=5')
103 d.addCallback(lambda _: self.engine.pull('a'))
103 d.addCallback(lambda _: self.engine.pull('a'))
104 d.addCallback(lambda _: self.engine.get_result())
104 d.addCallback(lambda _: self.engine.get_result())
105 d.addCallback(lambda _: self.engine.keys())
105 d.addCallback(lambda _: self.engine.keys())
106 d.addCallback(lambda _: self.engine.push(dict(a=10)))
106 d.addCallback(lambda _: self.engine.push(dict(a=10)))
107 return d
107 return d
108
108
109 def runTestExecute(self, cmd):
109 def runTestExecute(self, cmd):
110 self.shell = Interpreter()
110 self.shell = Interpreter()
111 actual = self.shell.execute(cmd)
111 actual = self.shell.execute(cmd)
112 def compare(computed):
112 def compare(computed):
113 actual['id'] = computed['id']
113 actual['id'] = computed['id']
114 self.assertEquals(actual, computed)
114 self.assertEquals(actual, computed)
115 d = self.engine.execute(cmd)
115 d = self.engine.execute(cmd)
116 d.addCallback(compare)
116 d.addCallback(compare)
117 return d
117 return d
118
118
119 @parametric
119 @parametric
120 def testExecute(cls):
120 def testExecute(cls):
121 return [(cls.runTestExecute, cmd) for cmd in validCommands]
121 return [(cls.runTestExecute, cmd) for cmd in validCommands]
122
122
123 def runTestExecuteFailures(self, cmd, exc):
123 def runTestExecuteFailures(self, cmd, exc):
124 def compare(f):
124 def compare(f):
125 self.assertRaises(exc, f.raiseException)
125 self.assertRaises(exc, f.raiseException)
126 d = self.engine.execute(cmd)
126 d = self.engine.execute(cmd)
127 d.addErrback(compare)
127 d.addErrback(compare)
128 return d
128 return d
129
129
130 @parametric
130 @parametric
131 def testExecuteFailures(cls):
131 def testExecuteFailures(cls):
132 return [(cls.runTestExecuteFailures, cmd, exc) for cmd, exc in invalidCommands]
132 return [(cls.runTestExecuteFailures, cmd, exc) for cmd, exc in invalidCommands]
133
133
134 def runTestPushPull(self, o):
134 def runTestPushPull(self, o):
135 d = self.engine.push(dict(a=o))
135 d = self.engine.push(dict(a=o))
136 d.addCallback(lambda r: self.engine.pull('a'))
136 d.addCallback(lambda r: self.engine.pull('a'))
137 d.addCallback(lambda r: self.assertEquals(o,r))
137 d.addCallback(lambda r: self.assertEquals(o,r))
138 return d
138 return d
139
139
140 @parametric
140 @parametric
141 def testPushPull(cls):
141 def testPushPull(cls):
142 objs = [10,"hi there",1.2342354,{"p":(1,2)},None]
142 objs = [10,"hi there",1.2342354,{"p":(1,2)},None]
143 return [(cls.runTestPushPull, o) for o in objs]
143 return [(cls.runTestPushPull, o) for o in objs]
144
144
145 def testPullNameError(self):
145 def testPullNameError(self):
146 d = self.engine.push(dict(a=5))
146 d = self.engine.push(dict(a=5))
147 d.addCallback(lambda _:self.engine.reset())
147 d.addCallback(lambda _:self.engine.reset())
148 d.addCallback(lambda _: self.engine.pull("a"))
148 d.addCallback(lambda _: self.engine.pull("a"))
149 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
149 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
150 return d
150 return d
151
151
152 def testPushPullFailures(self):
152 def testPushPullFailures(self):
153 d = self.engine.pull('a')
153 d = self.engine.pull('a')
154 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
154 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
155 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
155 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
156 d.addCallback(lambda _: self.engine.pull('l'))
156 d.addCallback(lambda _: self.engine.pull('l'))
157 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
157 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
158 d.addCallback(lambda _: self.engine.push(dict(l=lambda x: x)))
158 d.addCallback(lambda _: self.engine.push(dict(l=lambda x: x)))
159 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
159 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
160 return d
160 return d
161
161
162 def testPushPullArray(self):
162 def testPushPullArray(self):
163 try:
163 try:
164 import numpy
164 import numpy
165 except:
165 except:
166 print 'no numpy, ',
167 return
166 return
168 a = numpy.random.random(1000)
167 a = numpy.random.random(1000)
169 d = self.engine.push(dict(a=a))
168 d = self.engine.push(dict(a=a))
170 d.addCallback(lambda _: self.engine.pull('a'))
169 d.addCallback(lambda _: self.engine.pull('a'))
171 d.addCallback(lambda b: b==a)
170 d.addCallback(lambda b: b==a)
172 d.addCallback(lambda c: c.all())
171 d.addCallback(lambda c: c.all())
173 return self.assertDeferredEquals(d, True)
172 return self.assertDeferredEquals(d, True)
174
173
175 def testPushFunction(self):
174 def testPushFunction(self):
176
175
177 d = self.engine.push_function(dict(f=testf))
176 d = self.engine.push_function(dict(f=testf))
178 d.addCallback(lambda _: self.engine.execute('result = f(10)'))
177 d.addCallback(lambda _: self.engine.execute('result = f(10)'))
179 d.addCallback(lambda _: self.engine.pull('result'))
178 d.addCallback(lambda _: self.engine.pull('result'))
180 d.addCallback(lambda r: self.assertEquals(r, testf(10)))
179 d.addCallback(lambda r: self.assertEquals(r, testf(10)))
181 return d
180 return d
182
181
183 def testPullFunction(self):
182 def testPullFunction(self):
184 d = self.engine.push_function(dict(f=testf, g=testg))
183 d = self.engine.push_function(dict(f=testf, g=testg))
185 d.addCallback(lambda _: self.engine.pull_function(('f','g')))
184 d.addCallback(lambda _: self.engine.pull_function(('f','g')))
186 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
185 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
187 return d
186 return d
188
187
189 def testPushFunctionGlobal(self):
188 def testPushFunctionGlobal(self):
190 """Make sure that pushed functions pick up the user's namespace for globals."""
189 """Make sure that pushed functions pick up the user's namespace for globals."""
191 d = self.engine.push(dict(globala=globala))
190 d = self.engine.push(dict(globala=globala))
192 d.addCallback(lambda _: self.engine.push_function(dict(g=testg)))
191 d.addCallback(lambda _: self.engine.push_function(dict(g=testg)))
193 d.addCallback(lambda _: self.engine.execute('result = g(10)'))
192 d.addCallback(lambda _: self.engine.execute('result = g(10)'))
194 d.addCallback(lambda _: self.engine.pull('result'))
193 d.addCallback(lambda _: self.engine.pull('result'))
195 d.addCallback(lambda r: self.assertEquals(r, testg(10)))
194 d.addCallback(lambda r: self.assertEquals(r, testg(10)))
196 return d
195 return d
197
196
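The test above checks that a pushed function resolves its globals in the engine's user namespace rather than in the module where it was defined. Purely as an illustrative sketch of that kind of rebinding (not the actual push_function implementation; `gdemo`, `namespace` and `rebound` are made-up names for the example):

import types

def gdemo(x):
    # 'globala' is looked up in the function's globals dict at call time,
    # so rebuilding the function around another dict changes what it sees.
    return globala * x

namespace = {'globala': 99}
rebound = types.FunctionType(gdemo.func_code, namespace, gdemo.func_name)
assert rebound(10) == 990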
198 def testGetResultFailure(self):
197 def testGetResultFailure(self):
199 d = self.engine.get_result(None)
198 d = self.engine.get_result(None)
200 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
199 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
201 d.addCallback(lambda _: self.engine.get_result(10))
200 d.addCallback(lambda _: self.engine.get_result(10))
202 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
201 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
203 return d
202 return d
204
203
205 def runTestGetResult(self, cmd):
204 def runTestGetResult(self, cmd):
206 self.shell = Interpreter()
205 self.shell = Interpreter()
207 actual = self.shell.execute(cmd)
206 actual = self.shell.execute(cmd)
208 def compare(computed):
207 def compare(computed):
209 actual['id'] = computed['id']
208 actual['id'] = computed['id']
210 self.assertEquals(actual, computed)
209 self.assertEquals(actual, computed)
211 d = self.engine.execute(cmd)
210 d = self.engine.execute(cmd)
212 d.addCallback(lambda r: self.engine.get_result(r['number']))
211 d.addCallback(lambda r: self.engine.get_result(r['number']))
213 d.addCallback(compare)
212 d.addCallback(compare)
214 return d
213 return d
215
214
216 @parametric
215 @parametric
217 def testGetResult(cls):
216 def testGetResult(cls):
218 return [(cls.runTestGetResult, cmd) for cmd in validCommands]
217 return [(cls.runTestGetResult, cmd) for cmd in validCommands]
219
218
220 def testGetResultDefault(self):
219 def testGetResultDefault(self):
221 cmd = 'a=5'
220 cmd = 'a=5'
222 shell = self.createShell()
221 shell = self.createShell()
223 shellResult = shell.execute(cmd)
222 shellResult = shell.execute(cmd)
224 def popit(dikt, key):
223 def popit(dikt, key):
225 dikt.pop(key)
224 dikt.pop(key)
226 return dikt
225 return dikt
227 d = self.engine.execute(cmd)
226 d = self.engine.execute(cmd)
228 d.addCallback(lambda _: self.engine.get_result())
227 d.addCallback(lambda _: self.engine.get_result())
229 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r,'id')))
228 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r,'id')))
230 return d
229 return d
231
230
232 def testKeys(self):
231 def testKeys(self):
233 d = self.engine.keys()
232 d = self.engine.keys()
234 d.addCallback(lambda s: isinstance(s, list))
233 d.addCallback(lambda s: isinstance(s, list))
235 d.addCallback(lambda r: self.assertEquals(r, True))
234 d.addCallback(lambda r: self.assertEquals(r, True))
236 return d
235 return d
237
236
238 Parametric(IEngineCoreTestCase)
237 Parametric(IEngineCoreTestCase)
239
238
240 class IEngineSerializedTestCase(object):
239 class IEngineSerializedTestCase(object):
241 """Test an IEngineCore implementer."""
240 """Test an IEngineCore implementer."""
242
241
243 def testIEngineSerializedInterface(self):
242 def testIEngineSerializedInterface(self):
244 """Does self.engine claim to implement IEngineCore?"""
243 """Does self.engine claim to implement IEngineCore?"""
245 self.assert_(es.IEngineSerialized.providedBy(self.engine))
244 self.assert_(es.IEngineSerialized.providedBy(self.engine))
246
245
247 def testIEngineSerializedInterfaceMethods(self):
246 def testIEngineSerializedInterfaceMethods(self):
248 """Does self.engine have the methods and attributes in IEngireCore."""
247 """Does self.engine have the methods and attributes in IEngireCore."""
249 for m in list(es.IEngineSerialized):
248 for m in list(es.IEngineSerialized):
250 self.assert_(hasattr(self.engine, m))
249 self.assert_(hasattr(self.engine, m))
251
250
252 def testIEngineSerializedDeferreds(self):
251 def testIEngineSerializedDeferreds(self):
253 dList = []
252 dList = []
254 d = self.engine.push_serialized(dict(key=newserialized.serialize(12345)))
253 d = self.engine.push_serialized(dict(key=newserialized.serialize(12345)))
255 self.assert_(isinstance(d, defer.Deferred))
254 self.assert_(isinstance(d, defer.Deferred))
256 dList.append(d)
255 dList.append(d)
257 d = self.engine.pull_serialized('key')
256 d = self.engine.pull_serialized('key')
258 self.assert_(isinstance(d, defer.Deferred))
257 self.assert_(isinstance(d, defer.Deferred))
259 dList.append(d)
258 dList.append(d)
260 D = defer.DeferredList(dList)
259 D = defer.DeferredList(dList)
261 return D
260 return D
262
261
263 def testPushPullSerialized(self):
262 def testPushPullSerialized(self):
264 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
263 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
265 d = defer.succeed(None)
264 d = defer.succeed(None)
266 for o in objs:
265 for o in objs:
267 self.engine.push_serialized(dict(key=newserialized.serialize(o)))
266 self.engine.push_serialized(dict(key=newserialized.serialize(o)))
268 value = self.engine.pull_serialized('key')
267 value = self.engine.pull_serialized('key')
269 value.addCallback(lambda serial: newserialized.IUnSerialized(serial).getObject())
268 value.addCallback(lambda serial: newserialized.IUnSerialized(serial).getObject())
270 d = self.assertDeferredEquals(value,o,d)
269 d = self.assertDeferredEquals(value,o,d)
271 return d
270 return d
272
271
273 def testPullSerializedFailures(self):
272 def testPullSerializedFailures(self):
274 d = self.engine.pull_serialized('a')
273 d = self.engine.pull_serialized('a')
275 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
274 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
276 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
275 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
277 d.addCallback(lambda _: self.engine.pull_serialized('l'))
276 d.addCallback(lambda _: self.engine.pull_serialized('l'))
278 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
277 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
279 return d
278 return d
280
279
281 Parametric(IEngineSerializedTestCase)
280 Parametric(IEngineSerializedTestCase)
282
281
283 class IEngineQueuedTestCase(object):
282 class IEngineQueuedTestCase(object):
284 """Test an IEngineQueued implementer."""
283 """Test an IEngineQueued implementer."""
285
284
286 def testIEngineQueuedInterface(self):
285 def testIEngineQueuedInterface(self):
287 """Does self.engine claim to implement IEngineQueued?"""
286 """Does self.engine claim to implement IEngineQueued?"""
288 self.assert_(es.IEngineQueued.providedBy(self.engine))
287 self.assert_(es.IEngineQueued.providedBy(self.engine))
289
288
290 def testIEngineQueuedInterfaceMethods(self):
289 def testIEngineQueuedInterfaceMethods(self):
291 """Does self.engine have the methods and attributes in IEngireQueued."""
290 """Does self.engine have the methods and attributes in IEngireQueued."""
292 for m in list(es.IEngineQueued):
291 for m in list(es.IEngineQueued):
293 self.assert_(hasattr(self.engine, m))
292 self.assert_(hasattr(self.engine, m))
294
293
295 def testIEngineQueuedDeferreds(self):
294 def testIEngineQueuedDeferreds(self):
296 dList = []
295 dList = []
297 d = self.engine.clear_queue()
296 d = self.engine.clear_queue()
298 self.assert_(isinstance(d, defer.Deferred))
297 self.assert_(isinstance(d, defer.Deferred))
299 dList.append(d)
298 dList.append(d)
300 d = self.engine.queue_status()
299 d = self.engine.queue_status()
301 self.assert_(isinstance(d, defer.Deferred))
300 self.assert_(isinstance(d, defer.Deferred))
302 dList.append(d)
301 dList.append(d)
303 D = defer.DeferredList(dList)
302 D = defer.DeferredList(dList)
304 return D
303 return D
305
304
306 def testClearQueue(self):
305 def testClearQueue(self):
307 result = self.engine.clear_queue()
306 result = self.engine.clear_queue()
308 d1 = self.assertDeferredEquals(result, None)
307 d1 = self.assertDeferredEquals(result, None)
309 d1.addCallback(lambda _: self.engine.queue_status())
308 d1.addCallback(lambda _: self.engine.queue_status())
310 d2 = self.assertDeferredEquals(d1, {'queue':[], 'pending':'None'})
309 d2 = self.assertDeferredEquals(d1, {'queue':[], 'pending':'None'})
311 return d2
310 return d2
312
311
313 def testQueueStatus(self):
312 def testQueueStatus(self):
314 result = self.engine.queue_status()
313 result = self.engine.queue_status()
315 result.addCallback(lambda r: 'queue' in r and 'pending' in r)
314 result.addCallback(lambda r: 'queue' in r and 'pending' in r)
316 d = self.assertDeferredEquals(result, True)
315 d = self.assertDeferredEquals(result, True)
317 return d
316 return d
318
317
319 Parametric(IEngineQueuedTestCase)
318 Parametric(IEngineQueuedTestCase)
320
319
321 class IEnginePropertiesTestCase(object):
320 class IEnginePropertiesTestCase(object):
322 """Test an IEngineProperties implementor."""
321 """Test an IEngineProperties implementor."""
323
322
324 def testIEnginePropertiesInterface(self):
323 def testIEnginePropertiesInterface(self):
325 """Does self.engine claim to implement IEngineProperties?"""
324 """Does self.engine claim to implement IEngineProperties?"""
326 self.assert_(es.IEngineProperties.providedBy(self.engine))
325 self.assert_(es.IEngineProperties.providedBy(self.engine))
327
326
328 def testIEnginePropertiesInterfaceMethods(self):
327 def testIEnginePropertiesInterfaceMethods(self):
329 """Does self.engine have the methods and attributes in IEngireProperties."""
328 """Does self.engine have the methods and attributes in IEngireProperties."""
330 for m in list(es.IEngineProperties):
329 for m in list(es.IEngineProperties):
331 self.assert_(hasattr(self.engine, m))
330 self.assert_(hasattr(self.engine, m))
332
331
333 def testGetSetProperties(self):
332 def testGetSetProperties(self):
334 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
333 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
335 d = self.engine.set_properties(dikt)
334 d = self.engine.set_properties(dikt)
336 d.addCallback(lambda r: self.engine.get_properties())
335 d.addCallback(lambda r: self.engine.get_properties())
337 d = self.assertDeferredEquals(d, dikt)
336 d = self.assertDeferredEquals(d, dikt)
338 d.addCallback(lambda r: self.engine.get_properties(('c',)))
337 d.addCallback(lambda r: self.engine.get_properties(('c',)))
339 d = self.assertDeferredEquals(d, {'c': dikt['c']})
338 d = self.assertDeferredEquals(d, {'c': dikt['c']})
340 d.addCallback(lambda r: self.engine.set_properties(dict(c=False)))
339 d.addCallback(lambda r: self.engine.set_properties(dict(c=False)))
341 d.addCallback(lambda r: self.engine.get_properties(('c', 'd')))
340 d.addCallback(lambda r: self.engine.get_properties(('c', 'd')))
342 d = self.assertDeferredEquals(d, dict(c=False, d=None))
341 d = self.assertDeferredEquals(d, dict(c=False, d=None))
343 return d
342 return d
344
343
345 def testClearProperties(self):
344 def testClearProperties(self):
346 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
345 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
347 d = self.engine.set_properties(dikt)
346 d = self.engine.set_properties(dikt)
348 d.addCallback(lambda r: self.engine.clear_properties())
347 d.addCallback(lambda r: self.engine.clear_properties())
349 d.addCallback(lambda r: self.engine.get_properties())
348 d.addCallback(lambda r: self.engine.get_properties())
350 d = self.assertDeferredEquals(d, {})
349 d = self.assertDeferredEquals(d, {})
351 return d
350 return d
352
351
353 def testDelHasProperties(self):
352 def testDelHasProperties(self):
354 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
353 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
355 d = self.engine.set_properties(dikt)
354 d = self.engine.set_properties(dikt)
356 d.addCallback(lambda r: self.engine.del_properties(('b','e')))
355 d.addCallback(lambda r: self.engine.del_properties(('b','e')))
357 d.addCallback(lambda r: self.engine.has_properties(('a','b','c','d','e')))
356 d.addCallback(lambda r: self.engine.has_properties(('a','b','c','d','e')))
358 d = self.assertDeferredEquals(d, [True, False, True, True, False])
357 d = self.assertDeferredEquals(d, [True, False, True, True, False])
359 return d
358 return d
360
359
361 def testStrictDict(self):
360 def testStrictDict(self):
362 s = """from IPython.kernel.engineservice import get_engine
361 s = """from IPython.kernel.engineservice import get_engine
363 p = get_engine(%s).properties"""%self.engine.id
362 p = get_engine(%s).properties"""%self.engine.id
364 d = self.engine.execute(s)
363 d = self.engine.execute(s)
365 d.addCallback(lambda r: self.engine.execute("p['a'] = lambda _:None"))
364 d.addCallback(lambda r: self.engine.execute("p['a'] = lambda _:None"))
366 d = self.assertDeferredRaises(d, error.InvalidProperty)
365 d = self.assertDeferredRaises(d, error.InvalidProperty)
367 d.addCallback(lambda r: self.engine.execute("p['a'] = range(5)"))
366 d.addCallback(lambda r: self.engine.execute("p['a'] = range(5)"))
368 d.addCallback(lambda r: self.engine.execute("p['a'].append(5)"))
367 d.addCallback(lambda r: self.engine.execute("p['a'].append(5)"))
369 d.addCallback(lambda r: self.engine.get_properties('a'))
368 d.addCallback(lambda r: self.engine.get_properties('a'))
370 d = self.assertDeferredEquals(d, dict(a=range(5)))
369 d = self.assertDeferredEquals(d, dict(a=range(5)))
371 return d
370 return d
372
371
373 Parametric(IEnginePropertiesTestCase)
372 Parametric(IEnginePropertiesTestCase)
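All of the test cases above share one Twisted idiom: every engine call returns a Deferred, and an expected failure is checked by an errback that re-raises the wrapped exception via Failure.raiseException(). A minimal self-contained sketch of that pattern, independent of the IPython test classes and using made-up names (`lookup`, `namespace`):

from twisted.internet import defer

def lookup(name, namespace):
    # Raising inside the wrapped call turns the Deferred's result into a
    # Failure, which is delivered to the errback chain instead of callbacks.
    if name not in namespace:
        raise NameError(name)
    return namespace[name]

d = defer.maybeDeferred(lookup, 'a', {})
# The errback receives a twisted.python.failure.Failure; f.raiseException()
# re-raises the original NameError, which is what the assertRaises checks
# in the tests above rely on.
d.addErrback(lambda f: isinstance(f.value, NameError) or f.raiseException())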
@@ -1,838 +1,828 b''
1 # encoding: utf-8
1 # encoding: utf-8
2
2
3 """"""
3 """"""
4
4
5 __docformat__ = "restructuredtext en"
5 __docformat__ = "restructuredtext en"
6
6
7 #-------------------------------------------------------------------------------
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
8 # Copyright (C) 2008 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
12 #-------------------------------------------------------------------------------
13
13
14 #-------------------------------------------------------------------------------
14 #-------------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-------------------------------------------------------------------------------
16 #-------------------------------------------------------------------------------
17
17
18 from twisted.internet import defer
18 from twisted.internet import defer
19
19
20 from IPython.kernel import engineservice as es
20 from IPython.kernel import engineservice as es
21 from IPython.kernel import multiengine as me
21 from IPython.kernel import multiengine as me
22 from IPython.kernel import newserialized
22 from IPython.kernel import newserialized
23 from IPython.kernel.error import NotDefined
23 from IPython.kernel.error import NotDefined
24 from IPython.testing import util
24 from IPython.testing import util
25 from IPython.testing.parametric import parametric, Parametric
25 from IPython.testing.parametric import parametric, Parametric
26 from IPython.kernel import newserialized
26 from IPython.kernel import newserialized
27 from IPython.kernel.util import printer
27 from IPython.kernel.util import printer
28 from IPython.kernel.error import (InvalidEngineID,
28 from IPython.kernel.error import (InvalidEngineID,
29 NoEnginesRegistered,
29 NoEnginesRegistered,
30 CompositeError,
30 CompositeError,
31 InvalidDeferredID)
31 InvalidDeferredID)
32 from IPython.kernel.tests.engineservicetest import validCommands, invalidCommands
32 from IPython.kernel.tests.engineservicetest import validCommands, invalidCommands
33 from IPython.kernel.core.interpreter import Interpreter
33 from IPython.kernel.core.interpreter import Interpreter
34
34
35
35
36 #-------------------------------------------------------------------------------
36 #-------------------------------------------------------------------------------
37 # Base classes and utilities
37 # Base classes and utilities
38 #-------------------------------------------------------------------------------
38 #-------------------------------------------------------------------------------
39
39
40 class IMultiEngineBaseTestCase(object):
40 class IMultiEngineBaseTestCase(object):
41 """Basic utilities for working with multiengine tests.
41 """Basic utilities for working with multiengine tests.
42
42
43 Subclasses should define:
43 Subclasses should define:
44
44
45 * self.multiengine
45 * self.multiengine
46 * self.engines to keep track of engines for clean up"""
46 * self.engines to keep track of engines for clean up"""
47
47
48 def createShell(self):
48 def createShell(self):
49 return Interpreter()
49 return Interpreter()
50
50
51 def addEngine(self, n=1):
51 def addEngine(self, n=1):
52 for i in range(n):
52 for i in range(n):
53 e = es.EngineService()
53 e = es.EngineService()
54 e.startService()
54 e.startService()
55 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
55 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
56 e.id = regDict['id']
56 e.id = regDict['id']
57 self.engines.append(e)
57 self.engines.append(e)
58
58
59
59
60 def testf(x):
60 def testf(x):
61 return 2.0*x
61 return 2.0*x
62
62
63
63
64 globala = 99
64 globala = 99
65
65
66
66
67 def testg(x):
67 def testg(x):
68 return globala*x
68 return globala*x
69
69
70
70
71 def isdid(did):
71 def isdid(did):
72 if not isinstance(did, str):
72 if not isinstance(did, str):
73 return False
73 return False
74 if not len(did)==40:
74 if not len(did)==40:
75 return False
75 return False
76 return True
76 return True
77
77
78
78
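isdid() above only checks that a non-blocking call handed back a 40-character string. That length matches, for example, a SHA-1 hex digest; whether the controller actually derives deferred ids that way is an assumption, and the snippet below merely illustrates a value that would pass the check:

import os
import hashlib

did = hashlib.sha1(os.urandom(16)).hexdigest()
# A SHA-1 hex digest is a 40-character string, so it satisfies the two
# conditions isdid() tests for.
assert isinstance(did, str) and len(did) == 40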
79 def _raise_it(f):
79 def _raise_it(f):
80 try:
80 try:
81 f.raiseException()
81 f.raiseException()
82 except CompositeError, e:
82 except CompositeError, e:
83 e.raise_exception()
83 e.raise_exception()
84
84
85 #-------------------------------------------------------------------------------
85 #-------------------------------------------------------------------------------
86 # IMultiEngineTestCase
86 # IMultiEngineTestCase
87 #-------------------------------------------------------------------------------
87 #-------------------------------------------------------------------------------
88
88
89 class IMultiEngineTestCase(IMultiEngineBaseTestCase):
89 class IMultiEngineTestCase(IMultiEngineBaseTestCase):
90 """A test for any object that implements IEngineMultiplexer.
90 """A test for any object that implements IEngineMultiplexer.
91
91
92 self.multiengine must be defined and implement IEngineMultiplexer.
92 self.multiengine must be defined and implement IEngineMultiplexer.
93 """
93 """
94
94
95 def testIMultiEngineInterface(self):
95 def testIMultiEngineInterface(self):
96 """Does self.engine claim to implement IEngineCore?"""
96 """Does self.engine claim to implement IEngineCore?"""
97 self.assert_(me.IEngineMultiplexer.providedBy(self.multiengine))
97 self.assert_(me.IEngineMultiplexer.providedBy(self.multiengine))
98 self.assert_(me.IMultiEngine.providedBy(self.multiengine))
98 self.assert_(me.IMultiEngine.providedBy(self.multiengine))
99
99
100 def testIEngineMultiplexerInterfaceMethods(self):
100 def testIEngineMultiplexerInterfaceMethods(self):
101 """Does self.engine have the methods and attributes in IEngineCore."""
101 """Does self.engine have the methods and attributes in IEngineCore."""
102 for m in list(me.IEngineMultiplexer):
102 for m in list(me.IEngineMultiplexer):
103 self.assert_(hasattr(self.multiengine, m))
103 self.assert_(hasattr(self.multiengine, m))
104
104
105 def testIEngineMultiplexerDeferreds(self):
105 def testIEngineMultiplexerDeferreds(self):
106 self.addEngine(1)
106 self.addEngine(1)
107 d= self.multiengine.execute('a=5', targets=0)
107 d= self.multiengine.execute('a=5', targets=0)
108 d.addCallback(lambda _: self.multiengine.push(dict(a=5),targets=0))
108 d.addCallback(lambda _: self.multiengine.push(dict(a=5),targets=0))
109 d.addCallback(lambda _: self.multiengine.push(dict(a=5, b='asdf', c=[1,2,3]),targets=0))
109 d.addCallback(lambda _: self.multiengine.push(dict(a=5, b='asdf', c=[1,2,3]),targets=0))
110 d.addCallback(lambda _: self.multiengine.pull(('a','b','c'),targets=0))
110 d.addCallback(lambda _: self.multiengine.pull(('a','b','c'),targets=0))
111 d.addCallback(lambda _: self.multiengine.get_result(targets=0))
111 d.addCallback(lambda _: self.multiengine.get_result(targets=0))
112 d.addCallback(lambda _: self.multiengine.reset(targets=0))
112 d.addCallback(lambda _: self.multiengine.reset(targets=0))
113 d.addCallback(lambda _: self.multiengine.keys(targets=0))
113 d.addCallback(lambda _: self.multiengine.keys(targets=0))
114 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)),targets=0))
114 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)),targets=0))
115 d.addCallback(lambda _: self.multiengine.pull_serialized('a',targets=0))
115 d.addCallback(lambda _: self.multiengine.pull_serialized('a',targets=0))
116 d.addCallback(lambda _: self.multiengine.clear_queue(targets=0))
116 d.addCallback(lambda _: self.multiengine.clear_queue(targets=0))
117 d.addCallback(lambda _: self.multiengine.queue_status(targets=0))
117 d.addCallback(lambda _: self.multiengine.queue_status(targets=0))
118 return d
118 return d
119
119
120 def testInvalidEngineID(self):
120 def testInvalidEngineID(self):
121 self.addEngine(1)
121 self.addEngine(1)
122 badID = 100
122 badID = 100
123 d = self.multiengine.execute('a=5', targets=badID)
123 d = self.multiengine.execute('a=5', targets=badID)
124 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
124 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
125 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
125 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
126 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
126 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
127 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
127 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
128 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
128 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
129 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
129 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
130 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
130 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
131 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
131 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
132 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
132 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
133 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
133 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
134 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
134 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
135 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
135 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
136 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
136 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
137 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
137 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
138 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
138 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
139 return d
139 return d
140
140
141 def testNoEnginesRegistered(self):
141 def testNoEnginesRegistered(self):
142 badID = 'all'
142 badID = 'all'
143 d= self.multiengine.execute('a=5', targets=badID)
143 d= self.multiengine.execute('a=5', targets=badID)
144 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
144 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
145 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
145 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
146 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
146 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
147 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
147 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
148 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
148 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
149 d.addCallback(lambda _: self.multiengine.get_result(targets=badID))
149 d.addCallback(lambda _: self.multiengine.get_result(targets=badID))
150 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
150 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
151 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
151 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
152 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
152 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
153 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
153 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
154 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
154 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
155 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
155 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
156 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
156 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
157 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
157 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
158 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
158 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
159 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
159 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
160 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
160 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
161 return d
161 return d
162
162
163 def runExecuteAll(self, d, cmd, shell):
163 def runExecuteAll(self, d, cmd, shell):
164 actual = shell.execute(cmd)
164 actual = shell.execute(cmd)
165 d.addCallback(lambda _: self.multiengine.execute(cmd))
165 d.addCallback(lambda _: self.multiengine.execute(cmd))
166 def compare(result):
166 def compare(result):
167 for r in result:
167 for r in result:
168 actual['id'] = r['id']
168 actual['id'] = r['id']
169 self.assertEquals(r, actual)
169 self.assertEquals(r, actual)
170 d.addCallback(compare)
170 d.addCallback(compare)
171
171
172 def testExecuteAll(self):
172 def testExecuteAll(self):
173 self.addEngine(4)
173 self.addEngine(4)
174 d= defer.Deferred()
174 d= defer.Deferred()
175 shell = Interpreter()
175 shell = Interpreter()
176 for cmd in validCommands:
176 for cmd in validCommands:
177 self.runExecuteAll(d, cmd, shell)
177 self.runExecuteAll(d, cmd, shell)
178 d.callback(None)
178 d.callback(None)
179 return d
179 return d
180
180
181 # The following two methods show how to do parametrized
181 # The following two methods show how to do parametrized
182 # tests. This is really slick! The same pattern is used above.
182 # tests. This is really slick! The same pattern is used above.
183 def runExecuteFailures(self, cmd, exc):
183 def runExecuteFailures(self, cmd, exc):
184 self.addEngine(4)
184 self.addEngine(4)
185 d= self.multiengine.execute(cmd)
185 d= self.multiengine.execute(cmd)
186 d.addErrback(lambda f: self.assertRaises(exc, _raise_it, f))
186 d.addErrback(lambda f: self.assertRaises(exc, _raise_it, f))
187 return d
187 return d
188
188
189 @parametric
189 @parametric
190 def testExecuteFailures(cls):
190 def testExecuteFailures(cls):
191 return [(cls.runExecuteFailures,cmd,exc) for
191 return [(cls.runExecuteFailures,cmd,exc) for
192 cmd,exc in invalidCommands]
192 cmd,exc in invalidCommands]
193
193
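The @parametric / Parametric pair above expands a single generator method into one test per (command, exception) pair; the real machinery lives in IPython.testing.parametric. As a generic illustration of the same idea (not that module's implementation), per-case methods can be stamped onto a TestCase with setattr, using made-up names:

import unittest

failure_cases = [('1/0', ZeroDivisionError), ('undefined_name', NameError)]

class ExecuteFailuresExample(unittest.TestCase):
    pass

def _make_case(cmd, exc):
    def test(self):
        # Each generated method asserts that evaluating cmd raises exc.
        self.assertRaises(exc, eval, cmd)
    return test

for i, (cmd, exc) in enumerate(failure_cases):
    setattr(ExecuteFailuresExample, 'test_execute_failure_%d' % i,
            _make_case(cmd, exc))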
194 def testPushPull(self):
194 def testPushPull(self):
195 self.addEngine(1)
195 self.addEngine(1)
196 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
196 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
197 d= self.multiengine.push(dict(key=objs[0]), targets=0)
197 d= self.multiengine.push(dict(key=objs[0]), targets=0)
198 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
198 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
199 d.addCallback(lambda r: self.assertEquals(r, [objs[0]]))
199 d.addCallback(lambda r: self.assertEquals(r, [objs[0]]))
200 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[1]), targets=0))
200 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[1]), targets=0))
201 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
201 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
202 d.addCallback(lambda r: self.assertEquals(r, [objs[1]]))
202 d.addCallback(lambda r: self.assertEquals(r, [objs[1]]))
203 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[2]), targets=0))
203 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[2]), targets=0))
204 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
204 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
205 d.addCallback(lambda r: self.assertEquals(r, [objs[2]]))
205 d.addCallback(lambda r: self.assertEquals(r, [objs[2]]))
206 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[3]), targets=0))
206 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[3]), targets=0))
207 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
207 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
208 d.addCallback(lambda r: self.assertEquals(r, [objs[3]]))
208 d.addCallback(lambda r: self.assertEquals(r, [objs[3]]))
209 d.addCallback(lambda _: self.multiengine.reset(targets=0))
209 d.addCallback(lambda _: self.multiengine.reset(targets=0))
210 d.addCallback(lambda _: self.multiengine.pull('a', targets=0))
210 d.addCallback(lambda _: self.multiengine.pull('a', targets=0))
211 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
211 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
212 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=20)))
212 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=20)))
213 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
213 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
214 d.addCallback(lambda r: self.assertEquals(r, [[10,20]]))
214 d.addCallback(lambda r: self.assertEquals(r, [[10,20]]))
215 return d
215 return d
216
216
217 def testPushPullAll(self):
217 def testPushPullAll(self):
218 self.addEngine(4)
218 self.addEngine(4)
219 d= self.multiengine.push(dict(a=10))
219 d= self.multiengine.push(dict(a=10))
220 d.addCallback(lambda _: self.multiengine.pull('a'))
220 d.addCallback(lambda _: self.multiengine.pull('a'))
221 d.addCallback(lambda r: self.assert_(r==[10,10,10,10]))
221 d.addCallback(lambda r: self.assert_(r==[10,10,10,10]))
222 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20)))
222 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20)))
223 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
223 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
224 d.addCallback(lambda r: self.assert_(r==4*[[10,20]]))
224 d.addCallback(lambda r: self.assert_(r==4*[[10,20]]))
225 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20), targets=0))
225 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20), targets=0))
226 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
226 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
227 d.addCallback(lambda r: self.assert_(r==[[10,20]]))
227 d.addCallback(lambda r: self.assert_(r==[[10,20]]))
228 d.addCallback(lambda _: self.multiengine.push(dict(a=None, b=None), targets=0))
228 d.addCallback(lambda _: self.multiengine.push(dict(a=None, b=None), targets=0))
229 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
229 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
230 d.addCallback(lambda r: self.assert_(r==[[None,None]]))
230 d.addCallback(lambda r: self.assert_(r==[[None,None]]))
231 return d
231 return d
232
232
233 def testPushPullSerialized(self):
233 def testPushPullSerialized(self):
234 self.addEngine(1)
234 self.addEngine(1)
235 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
235 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
236 d= self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[0])), targets=0)
236 d= self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[0])), targets=0)
237 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
237 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
238 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
238 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
239 d.addCallback(lambda r: self.assertEquals(r, objs[0]))
239 d.addCallback(lambda r: self.assertEquals(r, objs[0]))
240 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[1])), targets=0))
240 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[1])), targets=0))
241 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
241 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
242 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
242 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
243 d.addCallback(lambda r: self.assertEquals(r, objs[1]))
243 d.addCallback(lambda r: self.assertEquals(r, objs[1]))
244 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[2])), targets=0))
244 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[2])), targets=0))
245 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
245 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
246 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
246 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
247 d.addCallback(lambda r: self.assertEquals(r, objs[2]))
247 d.addCallback(lambda r: self.assertEquals(r, objs[2]))
248 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[3])), targets=0))
248 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[3])), targets=0))
249 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
249 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
250 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
250 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
251 d.addCallback(lambda r: self.assertEquals(r, objs[3]))
251 d.addCallback(lambda r: self.assertEquals(r, objs[3]))
252 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=range(5)), targets=0))
252 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=range(5)), targets=0))
253 d.addCallback(lambda _: self.multiengine.pull_serialized(('a','b'), targets=0))
253 d.addCallback(lambda _: self.multiengine.pull_serialized(('a','b'), targets=0))
254 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
254 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
255 d.addCallback(lambda r: self.assertEquals(r, [10, range(5)]))
255 d.addCallback(lambda r: self.assertEquals(r, [10, range(5)]))
256 d.addCallback(lambda _: self.multiengine.reset(targets=0))
256 d.addCallback(lambda _: self.multiengine.reset(targets=0))
257 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
257 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
258 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
258 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
259 return d
259 return d
260
260
261 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
261 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
262 d= defer.succeed(None)
262 d= defer.succeed(None)
263 for o in objs:
263 for o in objs:
264 self.multiengine.push_serialized(0, key=newserialized.serialize(o))
264 self.multiengine.push_serialized(0, key=newserialized.serialize(o))
265 value = self.multiengine.pull_serialized(0, 'key')
265 value = self.multiengine.pull_serialized(0, 'key')
266 value.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
266 value.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
267 d = self.assertDeferredEquals(value,o,d)
267 d = self.assertDeferredEquals(value,o,d)
268 return d
268 return d
269
269
270 def runGetResultAll(self, d, cmd, shell):
270 def runGetResultAll(self, d, cmd, shell):
271 actual = shell.execute(cmd)
271 actual = shell.execute(cmd)
272 d.addCallback(lambda _: self.multiengine.execute(cmd))
272 d.addCallback(lambda _: self.multiengine.execute(cmd))
273 d.addCallback(lambda _: self.multiengine.get_result())
273 d.addCallback(lambda _: self.multiengine.get_result())
274 def compare(result):
274 def compare(result):
275 for r in result:
275 for r in result:
276 actual['id'] = r['id']
276 actual['id'] = r['id']
277 self.assertEquals(r, actual)
277 self.assertEquals(r, actual)
278 d.addCallback(compare)
278 d.addCallback(compare)
279
279
280 def testGetResultAll(self):
280 def testGetResultAll(self):
281 self.addEngine(4)
281 self.addEngine(4)
282 d= defer.Deferred()
282 d= defer.Deferred()
283 shell = Interpreter()
283 shell = Interpreter()
284 for cmd in validCommands:
284 for cmd in validCommands:
285 self.runGetResultAll(d, cmd, shell)
285 self.runGetResultAll(d, cmd, shell)
286 d.callback(None)
286 d.callback(None)
287 return d
287 return d
288
288
289 def testGetResultDefault(self):
289 def testGetResultDefault(self):
290 self.addEngine(1)
290 self.addEngine(1)
291 target = 0
291 target = 0
292 cmd = 'a=5'
292 cmd = 'a=5'
293 shell = self.createShell()
293 shell = self.createShell()
294 shellResult = shell.execute(cmd)
294 shellResult = shell.execute(cmd)
295 def popit(dikt, key):
295 def popit(dikt, key):
296 dikt.pop(key)
296 dikt.pop(key)
297 return dikt
297 return dikt
298 d= self.multiengine.execute(cmd, targets=target)
298 d= self.multiengine.execute(cmd, targets=target)
299 d.addCallback(lambda _: self.multiengine.get_result(targets=target))
299 d.addCallback(lambda _: self.multiengine.get_result(targets=target))
300 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r[0],'id')))
300 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r[0],'id')))
301 return d
301 return d
302
302
303 def testGetResultFailure(self):
303 def testGetResultFailure(self):
304 self.addEngine(1)
304 self.addEngine(1)
305 d= self.multiengine.get_result(None, targets=0)
305 d= self.multiengine.get_result(None, targets=0)
306 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
306 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
307 d.addCallback(lambda _: self.multiengine.get_result(10, targets=0))
307 d.addCallback(lambda _: self.multiengine.get_result(10, targets=0))
308 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
308 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
309 return d
309 return d
310
310
311 def testPushFunction(self):
311 def testPushFunction(self):
312 self.addEngine(1)
312 self.addEngine(1)
313 d= self.multiengine.push_function(dict(f=testf), targets=0)
313 d= self.multiengine.push_function(dict(f=testf), targets=0)
314 d.addCallback(lambda _: self.multiengine.execute('result = f(10)', targets=0))
314 d.addCallback(lambda _: self.multiengine.execute('result = f(10)', targets=0))
315 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
315 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
316 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
316 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
317 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala), targets=0))
317 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala), targets=0))
318 d.addCallback(lambda _: self.multiengine.push_function(dict(g=testg), targets=0))
318 d.addCallback(lambda _: self.multiengine.push_function(dict(g=testg), targets=0))
319 d.addCallback(lambda _: self.multiengine.execute('result = g(10)', targets=0))
319 d.addCallback(lambda _: self.multiengine.execute('result = g(10)', targets=0))
320 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
320 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
321 d.addCallback(lambda r: self.assertEquals(r[0], testg(10)))
321 d.addCallback(lambda r: self.assertEquals(r[0], testg(10)))
322 return d
322 return d
323
323
324 def testPullFunction(self):
324 def testPullFunction(self):
325 self.addEngine(1)
325 self.addEngine(1)
326 d= self.multiengine.push(dict(a=globala), targets=0)
326 d= self.multiengine.push(dict(a=globala), targets=0)
327 d.addCallback(lambda _: self.multiengine.push_function(dict(f=testf), targets=0))
327 d.addCallback(lambda _: self.multiengine.push_function(dict(f=testf), targets=0))
328 d.addCallback(lambda _: self.multiengine.pull_function('f', targets=0))
328 d.addCallback(lambda _: self.multiengine.pull_function('f', targets=0))
329 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
329 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
330 d.addCallback(lambda _: self.multiengine.execute("def g(x): return x*x", targets=0))
330 d.addCallback(lambda _: self.multiengine.execute("def g(x): return x*x", targets=0))
331 d.addCallback(lambda _: self.multiengine.pull_function(('f','g'),targets=0))
331 d.addCallback(lambda _: self.multiengine.pull_function(('f','g'),targets=0))
332 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
332 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
333 return d
333 return d
334
334
335 def testPushFunctionAll(self):
335 def testPushFunctionAll(self):
336 self.addEngine(4)
336 self.addEngine(4)
337 d= self.multiengine.push_function(dict(f=testf))
337 d= self.multiengine.push_function(dict(f=testf))
338 d.addCallback(lambda _: self.multiengine.execute('result = f(10)'))
338 d.addCallback(lambda _: self.multiengine.execute('result = f(10)'))
339 d.addCallback(lambda _: self.multiengine.pull('result'))
339 d.addCallback(lambda _: self.multiengine.pull('result'))
340 d.addCallback(lambda r: self.assertEquals(r, 4*[testf(10)]))
340 d.addCallback(lambda r: self.assertEquals(r, 4*[testf(10)]))
341 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala)))
341 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala)))
342 d.addCallback(lambda _: self.multiengine.push_function(dict(testg=testg)))
342 d.addCallback(lambda _: self.multiengine.push_function(dict(testg=testg)))
343 d.addCallback(lambda _: self.multiengine.execute('result = testg(10)'))
343 d.addCallback(lambda _: self.multiengine.execute('result = testg(10)'))
344 d.addCallback(lambda _: self.multiengine.pull('result'))
344 d.addCallback(lambda _: self.multiengine.pull('result'))
345 d.addCallback(lambda r: self.assertEquals(r, 4*[testg(10)]))
345 d.addCallback(lambda r: self.assertEquals(r, 4*[testg(10)]))
346 return d
346 return d
347
347
348 def testPullFunctionAll(self):
348 def testPullFunctionAll(self):
349 self.addEngine(4)
349 self.addEngine(4)
350 d= self.multiengine.push_function(dict(f=testf))
350 d= self.multiengine.push_function(dict(f=testf))
351 d.addCallback(lambda _: self.multiengine.pull_function('f'))
351 d.addCallback(lambda _: self.multiengine.pull_function('f'))
352 d.addCallback(lambda r: self.assertEquals([func(10) for func in r], 4*[testf(10)]))
352 d.addCallback(lambda r: self.assertEquals([func(10) for func in r], 4*[testf(10)]))
353 return d
353 return d
354
354
355 def testGetIDs(self):
355 def testGetIDs(self):
356 self.addEngine(1)
356 self.addEngine(1)
357 d= self.multiengine.get_ids()
357 d= self.multiengine.get_ids()
358 d.addCallback(lambda r: self.assertEquals(r, [0]))
358 d.addCallback(lambda r: self.assertEquals(r, [0]))
359 d.addCallback(lambda _: self.addEngine(3))
359 d.addCallback(lambda _: self.addEngine(3))
360 d.addCallback(lambda _: self.multiengine.get_ids())
360 d.addCallback(lambda _: self.multiengine.get_ids())
361 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
361 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
362 return d
362 return d
363
363
364 def testClearQueue(self):
364 def testClearQueue(self):
365 self.addEngine(4)
365 self.addEngine(4)
366 d= self.multiengine.clear_queue()
366 d= self.multiengine.clear_queue()
367 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
367 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
368 return d
368 return d
369
369
370 def testQueueStatus(self):
370 def testQueueStatus(self):
371 self.addEngine(4)
371 self.addEngine(4)
372 d= self.multiengine.queue_status(targets=0)
372 d= self.multiengine.queue_status(targets=0)
373 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
373 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
374 return d
374 return d
375
375
376 def testGetSetProperties(self):
376 def testGetSetProperties(self):
377 self.addEngine(4)
377 self.addEngine(4)
378 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
378 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
379 d= self.multiengine.set_properties(dikt)
379 d= self.multiengine.set_properties(dikt)
380 d.addCallback(lambda r: self.multiengine.get_properties())
380 d.addCallback(lambda r: self.multiengine.get_properties())
381 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
381 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
382 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
382 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
383 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
383 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
384 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
384 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
385 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
385 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
386 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
386 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
387 return d
387 return d
388
388
389 def testClearProperties(self):
389 def testClearProperties(self):
390 self.addEngine(4)
390 self.addEngine(4)
391 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
391 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
392 d= self.multiengine.set_properties(dikt)
392 d= self.multiengine.set_properties(dikt)
393 d.addCallback(lambda r: self.multiengine.clear_properties())
393 d.addCallback(lambda r: self.multiengine.clear_properties())
394 d.addCallback(lambda r: self.multiengine.get_properties())
394 d.addCallback(lambda r: self.multiengine.get_properties())
395 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
395 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
396 return d
396 return d
397
397
398 def testDelHasProperties(self):
398 def testDelHasProperties(self):
399 self.addEngine(4)
399 self.addEngine(4)
400 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
400 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
401 d= self.multiengine.set_properties(dikt)
401 d= self.multiengine.set_properties(dikt)
402 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
402 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
403 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
403 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
404 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
404 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
405 return d
405 return d
406
406
407 Parametric(IMultiEngineTestCase)
407 Parametric(IMultiEngineTestCase)
408
408
409 #-------------------------------------------------------------------------------
409 #-------------------------------------------------------------------------------
410 # ISynchronousMultiEngineTestCase
410 # ISynchronousMultiEngineTestCase
411 #-------------------------------------------------------------------------------
411 #-------------------------------------------------------------------------------
412
412
413 class ISynchronousMultiEngineTestCase(IMultiEngineBaseTestCase):
413 class ISynchronousMultiEngineTestCase(IMultiEngineBaseTestCase):
414
414
415 def testISynchronousMultiEngineInterface(self):
415 def testISynchronousMultiEngineInterface(self):
416 """Does self.engine claim to implement IEngineCore?"""
416 """Does self.engine claim to implement IEngineCore?"""
417 self.assert_(me.ISynchronousEngineMultiplexer.providedBy(self.multiengine))
417 self.assert_(me.ISynchronousEngineMultiplexer.providedBy(self.multiengine))
418 self.assert_(me.ISynchronousMultiEngine.providedBy(self.multiengine))
418 self.assert_(me.ISynchronousMultiEngine.providedBy(self.multiengine))
419
419
420 def testExecute(self):
420 def testExecute(self):
421 self.addEngine(4)
421 self.addEngine(4)
422 execute = self.multiengine.execute
422 execute = self.multiengine.execute
423 d= execute('a=5', targets=0, block=True)
423 d= execute('a=5', targets=0, block=True)
424 d.addCallback(lambda r: self.assert_(len(r)==1))
424 d.addCallback(lambda r: self.assert_(len(r)==1))
425 d.addCallback(lambda _: execute('b=10'))
425 d.addCallback(lambda _: execute('b=10'))
426 d.addCallback(lambda r: self.assert_(len(r)==4))
426 d.addCallback(lambda r: self.assert_(len(r)==4))
427 d.addCallback(lambda _: execute('c=30', block=False))
427 d.addCallback(lambda _: execute('c=30', block=False))
428 d.addCallback(lambda did: self.assert_(isdid(did)))
428 d.addCallback(lambda did: self.assert_(isdid(did)))
429 d.addCallback(lambda _: execute('d=[0,1,2]', block=False))
429 d.addCallback(lambda _: execute('d=[0,1,2]', block=False))
430 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
430 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
431 d.addCallback(lambda r: self.assert_(len(r)==4))
431 d.addCallback(lambda r: self.assert_(len(r)==4))
432 return d
432 return d
433
433
434 def testPushPull(self):
434 def testPushPull(self):
435 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
435 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
436 self.addEngine(4)
436 self.addEngine(4)
437 push = self.multiengine.push
437 push = self.multiengine.push
438 pull = self.multiengine.pull
438 pull = self.multiengine.pull
439 d= push({'data':data}, targets=0)
439 d= push({'data':data}, targets=0)
440 d.addCallback(lambda r: pull('data', targets=0))
440 d.addCallback(lambda r: pull('data', targets=0))
441 d.addCallback(lambda r: self.assertEqual(r,[data]))
441 d.addCallback(lambda r: self.assertEqual(r,[data]))
442 d.addCallback(lambda _: push({'data':data}))
442 d.addCallback(lambda _: push({'data':data}))
443 d.addCallback(lambda r: pull('data'))
443 d.addCallback(lambda r: pull('data'))
444 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
444 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
445 d.addCallback(lambda _: push({'data':data}, block=False))
445 d.addCallback(lambda _: push({'data':data}, block=False))
446 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
446 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
447 d.addCallback(lambda _: pull('data', block=False))
447 d.addCallback(lambda _: pull('data', block=False))
448 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
448 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
449 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
449 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
450 d.addCallback(lambda _: push(dict(a=10,b=20)))
450 d.addCallback(lambda _: push(dict(a=10,b=20)))
451 d.addCallback(lambda _: pull(('a','b')))
451 d.addCallback(lambda _: pull(('a','b')))
452 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,20]]))
452 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,20]]))
453 return d
453 return d
454
454
455 def testPushPullFunction(self):
455 def testPushPullFunction(self):
456 self.addEngine(4)
456 self.addEngine(4)
457 pushf = self.multiengine.push_function
457 pushf = self.multiengine.push_function
458 pullf = self.multiengine.pull_function
458 pullf = self.multiengine.pull_function
459 push = self.multiengine.push
459 push = self.multiengine.push
460 pull = self.multiengine.pull
460 pull = self.multiengine.pull
461 execute = self.multiengine.execute
461 execute = self.multiengine.execute
462 d= pushf({'testf':testf}, targets=0)
462 d= pushf({'testf':testf}, targets=0)
463 d.addCallback(lambda r: pullf('testf', targets=0))
463 d.addCallback(lambda r: pullf('testf', targets=0))
464 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
464 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
465 d.addCallback(lambda _: execute('r = testf(10)', targets=0))
465 d.addCallback(lambda _: execute('r = testf(10)', targets=0))
466 d.addCallback(lambda _: pull('r', targets=0))
466 d.addCallback(lambda _: pull('r', targets=0))
467 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
467 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
468 d.addCallback(lambda _: pushf({'testf':testf}, block=False))
468 d.addCallback(lambda _: pushf({'testf':testf}, block=False))
469 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
469 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
470 d.addCallback(lambda _: pullf('testf', block=False))
470 d.addCallback(lambda _: pullf('testf', block=False))
471 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
471 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
472 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
472 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
473 d.addCallback(lambda _: execute("def g(x): return x*x", targets=0))
473 d.addCallback(lambda _: execute("def g(x): return x*x", targets=0))
474 d.addCallback(lambda _: pullf(('testf','g'),targets=0))
474 d.addCallback(lambda _: pullf(('testf','g'),targets=0))
475 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
475 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
476 return d
476 return d
477
477
    def testGetResult(self):
        shell = Interpreter()
        result1 = shell.execute('a=10')
        result1['id'] = 0
        result2 = shell.execute('b=20')
        result2['id'] = 0
        execute = self.multiengine.execute
        get_result = self.multiengine.get_result
        self.addEngine(1)
        d = execute('a=10')
        d.addCallback(lambda _: get_result())
        d.addCallback(lambda r: self.assertEquals(r[0], result1))
        d.addCallback(lambda _: execute('b=20'))
        d.addCallback(lambda _: get_result(1))
        d.addCallback(lambda r: self.assertEquals(r[0], result1))
        d.addCallback(lambda _: get_result(2, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r[0], result2))
        return d

    def testResetAndKeys(self):
        self.addEngine(1)

        #Blocking mode
        d = self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0)
        d.addCallback(lambda _: self.multiengine.keys(targets=0))
        def keys_found(keys):
            self.assert_('a' in keys[0])
            self.assert_('b' in keys[0])
            self.assert_('c' in keys[0])
        d.addCallback(keys_found)
        d.addCallback(lambda _: self.multiengine.reset(targets=0))
        d.addCallback(lambda _: self.multiengine.keys(targets=0))
        def keys_not_found(keys):
            self.assert_('a' not in keys[0])
            self.assert_('b' not in keys[0])
            self.assert_('c' not in keys[0])
        d.addCallback(keys_not_found)

        #Non-blocking mode
        d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0))
        d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        def keys_found(keys):
            self.assert_('a' in keys[0])
            self.assert_('b' in keys[0])
            self.assert_('c' in keys[0])
        d.addCallback(keys_found)
        d.addCallback(lambda _: self.multiengine.reset(targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        def keys_not_found(keys):
            self.assert_('a' not in keys[0])
            self.assert_('b' not in keys[0])
            self.assert_('c' not in keys[0])
        d.addCallback(keys_not_found)

        return d

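    # push_serialized/pull_serialized work with pre-serialized values: the
    # dict entries are wrapped with newserialized.serialize() before pushing
    # and unwrapped via newserialized.IUnSerialized(...).getObject() on pull.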
    def testPushPullSerialized(self):
        self.addEngine(1)
        dikt = dict(a=10,b='hi there',c=1.2345,d={'p':(1,2)})
        sdikt = {}
        for k,v in dikt.iteritems():
            sdikt[k] = newserialized.serialize(v)
        d = self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0)
        d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
        d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
        d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
        d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
        d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
        d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0))
        d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0))
        d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
        d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
        d.addCallback(lambda _: self.multiengine.reset(targets=0))
        d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
        d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))

        #Non-blocking mode
        d.addCallback(lambda r: self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
        d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
        d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
        d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
        d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
        d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
        d.addCallback(lambda _: self.multiengine.reset(targets=0))
        d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
        return d

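    # clear_queue and queue_status are checked in both calling modes;
    # queue_status is expected to return one status tuple per engine.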
    def testClearQueue(self):
        self.addEngine(4)
        d = self.multiengine.clear_queue()
        d.addCallback(lambda r: self.multiengine.clear_queue(block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r,4*[None]))
        return d

    def testQueueStatus(self):
        self.addEngine(4)
        d = self.multiengine.queue_status(targets=0)
        d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
        d.addCallback(lambda r: self.multiengine.queue_status(targets=0, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
        return d

    def testGetIDs(self):
        self.addEngine(1)
        d = self.multiengine.get_ids()
        d.addCallback(lambda r: self.assertEquals(r, [0]))
        d.addCallback(lambda _: self.addEngine(3))
        d.addCallback(lambda _: self.multiengine.get_ids())
        d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
        return d

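    # The engine properties interface (set/get/clear/del/has_properties) acts
    # as a small per-engine key/value store; the next three tests cover it in
    # both blocking and non-blocking modes.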
    def testGetSetProperties(self):
        self.addEngine(4)
        dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
        d = self.multiengine.set_properties(dikt)
        d.addCallback(lambda r: self.multiengine.get_properties())
        d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
        d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
        d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
        d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
        d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
        d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))

        #Non-blocking
        d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.get_properties(block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
        d.addCallback(lambda r: self.multiengine.get_properties(('c',), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
        d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd'), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
        return d

    def testClearProperties(self):
        self.addEngine(4)
        dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
        d = self.multiengine.set_properties(dikt)
        d.addCallback(lambda r: self.multiengine.clear_properties())
        d.addCallback(lambda r: self.multiengine.get_properties())
        d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))

        #Non-blocking
        d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.clear_properties(block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.get_properties(block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
        return d

    def testDelHasProperties(self):
        self.addEngine(4)
        dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
        d = self.multiengine.set_properties(dikt)
        d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
        d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
        d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))

        #Non-blocking
        d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.del_properties(('b','e'), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e'), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
        return d

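    # clear_pending_deferreds() discards every result still waiting on the
    # controller, so the saved deferred ids become invalid and any later
    # get_pending_deferred call on them should errback with InvalidDeferredID.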
    def test_clear_pending_deferreds(self):
        self.addEngine(4)
        did_list = []
        d = self.multiengine.execute('a=10',block=False)
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.push(dict(b=10),block=False))
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.pull(('a','b'),block=False))
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        return d

#-------------------------------------------------------------------------------
# Coordinator test cases
#-------------------------------------------------------------------------------

class IMultiEngineCoordinatorTestCase(object):

    def testScatterGather(self):
        self.addEngine(4)
        d = self.multiengine.scatter('a', range(16))
        d.addCallback(lambda r: self.multiengine.gather('a'))
        d.addCallback(lambda r: self.assertEquals(r, range(16)))
        d.addCallback(lambda _: self.multiengine.gather('asdf'))
        d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
        return d

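    # The numpy scatter/gather tests skip themselves by returning early when
    # numpy is not importable, instead of failing.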
    def testScatterGatherNumpy(self):
        try:
            import numpy
            from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
        except ImportError:
            return
        else:
            self.addEngine(4)
            a = numpy.arange(16)
            d = self.multiengine.scatter('a', a)
            d.addCallback(lambda r: self.multiengine.gather('a'))
            d.addCallback(lambda r: assert_array_equal(r, a))
            return d

    def testMap(self):
        self.addEngine(4)
        def f(x):
            return x**2
        data = range(16)
        d = self.multiengine.map(f, data)
        d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
        return d


class ISynchronousMultiEngineCoordinatorTestCase(IMultiEngineCoordinatorTestCase):

    def testScatterGatherNonblocking(self):
        self.addEngine(4)
        d = self.multiengine.scatter('a', range(16), block=False)
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.gather('a', block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, range(16)))
        return d

    def testScatterGatherNumpyNonblocking(self):
        try:
            import numpy
            from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
        except ImportError:
            return
        else:
            self.addEngine(4)
            a = numpy.arange(16)
            d = self.multiengine.scatter('a', a, block=False)
            d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
            d.addCallback(lambda r: self.multiengine.gather('a', block=False))
            d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
            d.addCallback(lambda r: assert_array_equal(r, a))
            return d

    def testMapNonblocking(self):
        self.addEngine(4)
        def f(x):
            return x**2
        data = range(16)
        d = self.multiengine.map(f, data, block=False)
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
        return d

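    # This override repeats the pending-deferred clearing check using the
    # coordinator calls (scatter, gather, map) rather than execute/push/pull.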
    def test_clear_pending_deferreds(self):
        self.addEngine(4)
        did_list = []
        d = self.multiengine.scatter('a',range(16),block=False)
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.gather('a',block=False))
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.map(lambda x: x, range(16),block=False))
        d.addCallback(lambda did: did_list.append(did))
        d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
        d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
        return d

#-------------------------------------------------------------------------------
# Extras test cases
#-------------------------------------------------------------------------------

class IMultiEngineExtrasTestCase(object):

    def testZipPull(self):
        self.addEngine(4)
        d = self.multiengine.push(dict(a=10,b=20))
        d.addCallback(lambda r: self.multiengine.zip_pull(('a','b')))
        d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
        return d

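    # testRun writes a two-line script to a temporary file with
    # tempfile.mktemp and runs it on all engines, then pulls the resulting
    # variables to check that the file was executed everywhere.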
    def testRun(self):
        self.addEngine(4)
        import tempfile
        fname = tempfile.mktemp('foo.py')
        f = open(fname, 'w')
        f.write('a = 10\nb=30')
        f.close()
        d = self.multiengine.run(fname)
        d.addCallback(lambda r: self.multiengine.pull(('a','b')))
        d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
        return d


class ISynchronousMultiEngineExtrasTestCase(IMultiEngineExtrasTestCase):

    def testZipPullNonblocking(self):
        self.addEngine(4)
        d = self.multiengine.push(dict(a=10,b=20))
        d.addCallback(lambda r: self.multiengine.zip_pull(('a','b'), block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
        return d

    def testRunNonblocking(self):
        self.addEngine(4)
        import tempfile
        fname = tempfile.mktemp('foo.py')
        f = open(fname, 'w')
        f.write('a = 10\nb=30')
        f.close()
        d = self.multiengine.run(fname, block=False)
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.multiengine.pull(('a','b')))
        d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
        return d


#-------------------------------------------------------------------------------
# IFullSynchronousMultiEngineTestCase
#-------------------------------------------------------------------------------

class IFullSynchronousMultiEngineTestCase(ISynchronousMultiEngineTestCase,
                                          ISynchronousMultiEngineCoordinatorTestCase,
                                          ISynchronousMultiEngineExtrasTestCase):
    pass
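
# Editorial illustration, not part of this changeset: the classes above are
# mixins that assume the concrete test case supplies self.multiengine and an
# addEngine(n) helper. A minimal, hypothetical sketch of how such a concrete
# case could be assembled; the setUp/addEngine bodies below are placeholders,
# not IPython's actual test fixtures.

from twisted.trial import unittest as trial_unittest

class ExampleFullSynchronousMultiEngineTest(IFullSynchronousMultiEngineTestCase,
                                            trial_unittest.TestCase):

    def setUp(self):
        # A real fixture would build a synchronous multiengine controller here
        # and expose it as self.multiengine.
        self.multiengine = None  # placeholder only
        self.engines = []

    def addEngine(self, n=1):
        # A real fixture would create n engine services and register them with
        # the controller; this sketch only documents the expected signature.
        raise NotImplementedError("illustrative placeholder only")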
The remaining files in this changeset were modified, renamed, or removed; their diffs were truncated in this view. Renamed files:

file renamed from IPython/testing/ipdoctest.py to IPython/testing/attic/ipdoctest.py
file renamed from IPython/testing/tcommon.py to IPython/testing/attic/tcommon.py
file renamed from IPython/testing/testTEMPLATE.py to IPython/testing/attic/testTEMPLATE.py
file renamed from IPython/testing/tstTEMPLATE_doctest.py to IPython/testing/attic/tstTEMPLATE_doctest.py
file renamed from IPython/testing/tstTEMPLATE_doctest.txt to IPython/testing/attic/tstTEMPLATE_doctest.txt
file renamed from IPython/tools/tests/tst_tools_utils_doctest2.txt to IPython/tools/tests/test_tools_utils.txt