Fixing installation related issues.
Brian Granger
@@ -1,252 +1,252 b''
1 1 # encoding: utf-8
2 2 """
3 3 Test process execution and IO redirection.
4 4 """
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is
12 12 # in the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 from copy import copy, deepcopy
16 16 from cStringIO import StringIO
17 17 import string
18 18
19 19 from nose.tools import assert_equal
20 20
21 21 from IPython.frontend.prefilterfrontend import PrefilterFrontEnd
22 22 from IPython.core.ipapi import get as get_ipython0
23 23 from IPython.testing.plugin.ipdoctest import default_argv
24 24
25 25
26 26 def safe_deepcopy(d):
27 27 """ Deep copy every key of the given dict, when possible. Elsewhere
28 28 do a copy.
29 29 """
30 30 copied_d = dict()
31 31 for key, value in d.iteritems():
32 32 try:
33 33 copied_d[key] = deepcopy(value)
34 34 except:
35 35 try:
36 36 copied_d[key] = copy(value)
37 37 except:
38 38 copied_d[key] = value
39 39 return copied_d
40 40
41 41
42 42 class TestPrefilterFrontEnd(PrefilterFrontEnd):
43 43
44 44 input_prompt_template = string.Template('')
45 45 output_prompt_template = string.Template('')
46 46 banner = ''
47 47
48 48 def __init__(self):
49 49 self.out = StringIO()
50 50 PrefilterFrontEnd.__init__(self,argv=default_argv())
51 51 # Some more code for isolation (yeah, crazy)
52 52 self._on_enter()
53 53 self.out.flush()
54 54 self.out.reset()
55 55 self.out.truncate()
56 56
57 57 def write(self, string, *args, **kwargs):
58 58 self.out.write(string)
59 59
60 60 def _on_enter(self):
61 61 self.input_buffer += '\n'
62 62 PrefilterFrontEnd._on_enter(self)
63 63
64 64
65 65 def isolate_ipython0(func):
66 66 """ Decorator to isolate execution that involves an iptyhon0.
67 67
68 68 Notes
69 69 -----
70 70
71 71 Apply only to functions with no arguments. Nose skips functions
72 72 with arguments.
73 73 """
74 74 def my_func():
75 75 iplib = get_ipython0()
76 76 if iplib is None:
77 77 return func()
78 78 ipython0 = iplib.IP
79 79 global_ns = safe_deepcopy(ipython0.user_global_ns)
80 80 user_ns = safe_deepcopy(ipython0.user_ns)
81 81 try:
82 82 out = func()
83 83 finally:
84 84 ipython0.user_ns = user_ns
85 85 ipython0.user_global_ns = global_ns
86 86 # Undo the hack at creation of PrefilterFrontEnd
87 from IPythoncore. import iplib
87 from IPython.core import iplib
88 88 iplib.InteractiveShell.isthreaded = False
89 89 return out
90 90
91 91 my_func.__name__ = func.__name__
92 92 return my_func
93 93
94 94
95 95 @isolate_ipython0
96 96 def test_execution():
97 97 """ Test execution of a command.
98 98 """
99 99 f = TestPrefilterFrontEnd()
100 100 f.input_buffer = 'print 1'
101 101 f._on_enter()
102 102 out_value = f.out.getvalue()
103 103 assert_equal(out_value, '1\n')
104 104
105 105
106 106 @isolate_ipython0
107 107 def test_multiline():
108 108 """ Test execution of a multiline command.
109 109 """
110 110 f = TestPrefilterFrontEnd()
111 111 f.input_buffer = 'if True:'
112 112 f._on_enter()
113 113 f.input_buffer += 'print 1'
114 114 f._on_enter()
115 115 out_value = f.out.getvalue()
116 116 yield assert_equal, out_value, ''
117 117 f._on_enter()
118 118 out_value = f.out.getvalue()
119 119 yield assert_equal, out_value, '1\n'
120 120 f = TestPrefilterFrontEnd()
121 121 f.input_buffer='(1 +'
122 122 f._on_enter()
123 123 f.input_buffer += '0)'
124 124 f._on_enter()
125 125 out_value = f.out.getvalue()
126 126 yield assert_equal, out_value, ''
127 127 f._on_enter()
128 128 out_value = f.out.getvalue()
129 129 yield assert_equal, out_value, '1\n'
130 130
131 131
132 132 @isolate_ipython0
133 133 def test_capture():
134 134 """ Test the capture of output in different channels.
135 135 """
136 136 # Test on the OS-level stdout, stderr.
137 137 f = TestPrefilterFrontEnd()
138 138 f.input_buffer = \
139 139 'import os; out=os.fdopen(1, "w"); out.write("1") ; out.flush()'
140 140 f._on_enter()
141 141 out_value = f.out.getvalue()
142 142 yield assert_equal, out_value, '1'
143 143 f = TestPrefilterFrontEnd()
144 144 f.input_buffer = \
145 145 'import os; out=os.fdopen(2, "w"); out.write("1") ; out.flush()'
146 146 f._on_enter()
147 147 out_value = f.out.getvalue()
148 148 yield assert_equal, out_value, '1'
149 149
150 150
151 151 @isolate_ipython0
152 152 def test_magic():
153 153 """ Test the magic expansion and history.
154 154
155 155 This test is fairly fragile and will break when magics change.
156 156 """
157 157 f = TestPrefilterFrontEnd()
158 158 # Before checking the interactive namespace, make sure it's clear (it can
159 159 # otherwise pick up things stored in the user's local db)
160 160 f.input_buffer += '%reset -f'
161 161 f._on_enter()
162 162 f.complete_current_input()
163 163 # Now, run the %who magic and check output
164 164 f.input_buffer += '%who'
165 165 f._on_enter()
166 166 out_value = f.out.getvalue()
167 167 assert_equal(out_value, 'Interactive namespace is empty.\n')
168 168
169 169
170 170 @isolate_ipython0
171 171 def test_help():
172 172 """ Test object inspection.
173 173 """
174 174 f = TestPrefilterFrontEnd()
175 175 f.input_buffer += "def f():"
176 176 f._on_enter()
177 177 f.input_buffer += "'foobar'"
178 178 f._on_enter()
179 179 f.input_buffer += "pass"
180 180 f._on_enter()
181 181 f._on_enter()
182 182 f.input_buffer += "f?"
183 183 f._on_enter()
184 184 assert 'traceback' not in f.last_result
185 185 ## XXX: ipython doctest magic breaks this. I have no clue why
186 186 #out_value = f.out.getvalue()
187 187 #assert out_value.split()[-1] == 'foobar'
188 188
189 189
190 190 @isolate_ipython0
191 191 def test_completion_simple():
192 192 """ Test command-line completion on trivial examples.
193 193 """
194 194 f = TestPrefilterFrontEnd()
195 195 f.input_buffer = 'zzza = 1'
196 196 f._on_enter()
197 197 f.input_buffer = 'zzzb = 2'
198 198 f._on_enter()
199 199 f.input_buffer = 'zz'
200 200 f.complete_current_input()
201 201 out_value = f.out.getvalue()
202 202 yield assert_equal, out_value, '\nzzza zzzb '
203 203 yield assert_equal, f.input_buffer, 'zzz'
204 204
205 205
206 206 @isolate_ipython0
207 207 def test_completion_parenthesis():
208 208 """ Test command-line completion when a parenthesis is open.
209 209 """
210 210 f = TestPrefilterFrontEnd()
211 211 f.input_buffer = 'zzza = 1'
212 212 f._on_enter()
213 213 f.input_buffer = 'zzzb = 2'
214 214 f._on_enter()
215 215 f.input_buffer = 'map(zz'
216 216 f.complete_current_input()
217 217 out_value = f.out.getvalue()
218 218 yield assert_equal, out_value, '\nzzza zzzb '
219 219 yield assert_equal, f.input_buffer, 'map(zzz'
220 220
221 221
222 222 @isolate_ipython0
223 223 def test_completion_indexing():
224 224 """ Test command-line completion when indexing on objects.
225 225 """
226 226 f = TestPrefilterFrontEnd()
227 227 f.input_buffer = 'a = [0]'
228 228 f._on_enter()
229 229 f.input_buffer = 'a[0].'
230 230 f.complete_current_input()
231 231 assert_equal(f.input_buffer, 'a[0].__')
232 232
233 233
234 234 @isolate_ipython0
235 235 def test_completion_equal():
236 236 """ Test command-line completion when the delimiter is "=", not " ".
237 237 """
238 238 f = TestPrefilterFrontEnd()
239 239 f.input_buffer = 'a=1.'
240 240 f.complete_current_input()
241 241 assert_equal(f.input_buffer, 'a=1.__')
242 242
243 243
244 244
245 245 if __name__ == '__main__':
246 246 test_magic()
247 247 test_help()
248 248 test_execution()
249 249 test_multiline()
250 250 test_capture()
251 251 test_completion_simple()
252 252 test_completion_parenthesis()
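The isolate_ipython0 decorator above works by snapshotting the interactive namespaces before a test and restoring them afterwards. A minimal standalone sketch of that save/restore pattern, assuming only a hypothetical shell object with a user_ns dict (not the real IPython API):

from copy import deepcopy
from functools import wraps

def isolate_namespace(shell):
    """Return a decorator that restores shell.user_ns after the test runs."""
    def decorator(func):
        @wraps(func)
        def wrapper():
            saved = deepcopy(shell.user_ns)   # snapshot before the test
            try:
                return func()
            finally:
                shell.user_ns = saved         # undo whatever the test defined
        return wrapper
    return decorator

class FakeShell(object):
    """Hypothetical stand-in for the ipython0 instance."""
    def __init__(self):
        self.user_ns = {}

shell = FakeShell()

@isolate_namespace(shell)
def test_something():
    shell.user_ns['x'] = 1

test_something()
assert 'x' not in shell.user_ns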
@@ -1,96 +1,96 b''
1 1 # encoding: utf-8
2 2
3 3 """This module contains blocking clients for the controller interfaces.
4 4
5 5 Unlike the clients in `asyncclient.py`, the clients in this module are fully
6 6 blocking. This means that methods on the clients return the actual results
7 7 rather than a deferred to the result. Also, we manage the Twisted reactor
8 8 for you. This is done by running the reactor in a thread.
9 9
10 10 The main classes in this module are:
11 11
12 12 * MultiEngineClient
13 13 * TaskClient
14 14 * Task
15 15 * CompositeError
16 16 """
17 17
18 18 __docformat__ = "restructuredtext en"
19 19
20 20 #-------------------------------------------------------------------------------
21 21 # Copyright (C) 2008 The IPython Development Team
22 22 #
23 23 # Distributed under the terms of the BSD License. The full license is in
24 24 # the file COPYING, distributed as part of this software.
25 25 #-------------------------------------------------------------------------------
26 26
27 27 #-------------------------------------------------------------------------------
28 28 # Imports
29 29 #-------------------------------------------------------------------------------
30 30
31 31 import sys
32 32
33 # from IPython.tools import growl
33 # from IPython.utils import growl
34 34 # growl.start("IPython1 Client")
35 35
36 36
37 37 from twisted.internet import reactor
38 38 from IPython.kernel.clientconnector import ClientConnector
39 39 from IPython.kernel.twistedutil import ReactorInThread
40 40 from IPython.kernel.twistedutil import blockingCallFromThread
41 41
42 42 # These enable various things
43 43 from IPython.kernel import codeutil
44 44 import IPython.kernel.magic
45 45
46 46 # Other things that the user will need
47 47 from IPython.kernel.task import MapTask, StringTask
48 48 from IPython.kernel.error import CompositeError
49 49
50 50 #-------------------------------------------------------------------------------
51 51 # Code
52 52 #-------------------------------------------------------------------------------
53 53
54 54 _client_tub = ClientConnector()
55 55
56 56
57 57 def get_multiengine_client(furl_or_file=''):
58 58 """Get the blocking MultiEngine client.
59 59
60 60 :Parameters:
61 61 furl_or_file : str
62 62 A furl or a filename containing a furl. If empty, the
63 63 default furl_file will be used
64 64
65 65 :Returns:
66 66 The connected MultiEngineClient instance
67 67 """
68 68 client = blockingCallFromThread(_client_tub.get_multiengine_client,
69 69 furl_or_file)
70 70 return client.adapt_to_blocking_client()
71 71
72 72 def get_task_client(furl_or_file=''):
73 73 """Get the blocking Task client.
74 74
75 75 :Parameters:
76 76 furl_or_file : str
77 77 A furl or a filename containing a furl. If empty, the
78 78 default furl_file will be used
79 79
80 80 :Returns:
81 81 The connected TaskClient instance
82 82 """
83 83 client = blockingCallFromThread(_client_tub.get_task_client,
84 84 furl_or_file)
85 85 return client.adapt_to_blocking_client()
86 86
87 87
88 88 MultiEngineClient = get_multiengine_client
89 89 TaskClient = get_task_client
90 90
91 91
92 92
93 93 # Now we start the reactor in a thread
94 94 rit = ReactorInThread()
95 95 rit.setDaemon(True)
96 96 rit.start()
\ No newline at end of file
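The module above exposes MultiEngineClient and TaskClient as factory aliases around the blocking adapters. A hedged usage sketch follows; the import path, the executed code, and the pulled variable are placeholders, and the execute/pull calls assume the blocking client API of this IPython.kernel generation:

# Illustrative only: connect with the default furl_file and run code
# on all registered engines.
from IPython.kernel import client          # assumed import path

mec = client.MultiEngineClient()           # blocking multiengine client
mec.execute('a = 2 + 2')                   # runs on every engine, blocks
print mec.pull('a')                        # e.g. [4, 4, 4, 4] with 4 engines

tc = client.TaskClient()                   # blocking task client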
@@ -1,53 +1,53 b''
1 1 # encoding: utf-8
2 2
3 3 """Object to manage sys.excepthook().
4 4
5 5 Synchronous version: prints errors when called.
6 6 """
7 7
8 8 __docformat__ = "restructuredtext en"
9 9
10 10 #-------------------------------------------------------------------------------
11 11 # Copyright (C) 2008 The IPython Development Team
12 12 #
13 13 # Distributed under the terms of the BSD License. The full license is in
14 14 # the file COPYING, distributed as part of this software.
15 15 #-------------------------------------------------------------------------------
16 16
17 17 #-------------------------------------------------------------------------------
18 18 # Imports
19 19 #-------------------------------------------------------------------------------
20 20 from traceback_trap import TracebackTrap
21 from IPython.ultraTB import ColorTB
21 from IPython.core.ultratb import ColorTB
22 22
23 23 class SyncTracebackTrap(TracebackTrap):
24 24 """ TracebackTrap that displays immediatly the traceback in addition
25 25 to capturing it. Useful in frontends, as without this traceback trap,
26 26 some tracebacks never get displayed.
27 27 """
28 28
29 29 def __init__(self, sync_formatter=None, formatters=None,
30 30 raiseException=True):
31 31 """
32 32 sync_formatter: Callable to display the traceback.
33 33 formatters: A list of formatters to apply.
34 34 """
35 35 TracebackTrap.__init__(self, formatters=formatters)
36 36 if sync_formatter is None:
37 37 sync_formatter = ColorTB(color_scheme='LightBG')
38 38 self.sync_formatter = sync_formatter
39 39 self.raiseException = raiseException
40 40
41 41
42 42 def hook(self, *args):
43 43 """ This method actually implements the hook.
44 44 """
45 45 self.args = args
46 46 if not self.raiseException:
47 47 print self.sync_formatter(*self.args)
48 48 else:
49 49 raise
50 50
51 51
52 52
53 53
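SyncTracebackTrap.hook above either pretty-prints the captured traceback or re-raises it, depending on raiseException. A minimal standalone sketch of the same behaviour, wired straight into sys.excepthook and using the standard traceback module in place of ColorTB, with no TracebackTrap base class:

import sys
import traceback

class SimpleTracebackTrap(object):
    """Capture exceptions passed to sys.excepthook, then print or re-raise."""
    def __init__(self, raise_exception=False):
        self.raise_exception = raise_exception
        self.args = None

    def hook(self, etype, value, tb):
        self.args = (etype, value, tb)        # capture, like the trap above
        if not self.raise_exception:
            traceback.print_exception(etype, value, tb)
        else:
            raise value

trap = SimpleTracebackTrap()
sys.excepthook = trap.hook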
@@ -1,753 +1,753 b''
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.kernel.test.test_multiengine -*-
3 3
4 4 """Adapt the IPython ControllerServer to IMultiEngine.
5 5
6 6 This module provides classes that adapt a ControllerService to the
7 7 IMultiEngine interface. This interface is a basic interactive interface
8 8 for working with a set of engines where it is desired to have explicit
9 9 access to each registered engine.
10 10
11 11 The classes here are exposed to the network in files like:
12 12
13 13 * multienginevanilla.py
14 14 * multienginepb.py
15 15 """
16 16
17 17 __docformat__ = "restructuredtext en"
18 18
19 19 #-------------------------------------------------------------------------------
20 20 # Copyright (C) 2008 The IPython Development Team
21 21 #
22 22 # Distributed under the terms of the BSD License. The full license is in
23 23 # the file COPYING, distributed as part of this software.
24 24 #-------------------------------------------------------------------------------
25 25
26 26 #-------------------------------------------------------------------------------
27 27 # Imports
28 28 #-------------------------------------------------------------------------------
29 29
30 30 from new import instancemethod
31 31 from types import FunctionType
32 32
33 33 from twisted.application import service
34 34 from twisted.internet import defer, reactor
35 35 from twisted.python import log, components, failure
36 36 from zope.interface import Interface, implements, Attribute
37 37
38 from IPython.tools import growl
38 from IPython.utils import growl
39 39 from IPython.kernel.util import printer
40 40 from IPython.kernel.twistedutil import gatherBoth
41 41 from IPython.kernel import map as Map
42 42 from IPython.kernel import error
43 43 from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase
44 44 from IPython.kernel.controllerservice import \
45 45 ControllerAdapterBase, \
46 46 ControllerService, \
47 47 IControllerBase
48 48
49 49
50 50 #-------------------------------------------------------------------------------
51 51 # Interfaces for the MultiEngine representation of a controller
52 52 #-------------------------------------------------------------------------------
53 53
54 54 class IEngineMultiplexer(Interface):
55 55 """Interface to multiple engines implementing IEngineCore/Serialized/Queued.
56 56
57 57 This class simply acts as a multiplexer of methods that are in the
58 58 various IEngines* interfaces. Thus the methods here are just like those
59 59 in the IEngine* interfaces, but with an extra first argument, targets.
60 60 The targets argument can have the following forms:
61 61
62 62 * targets = 10 # Engines are indexed by ints
63 63 * targets = [0,1,2,3] # A list of ints
64 64 * targets = 'all' # A string to indicate all targets
65 65
66 66 If targets is bad in any way, an InvalidEngineID will be raised. This
67 67 includes engines not being registered.
68 68
69 69 All IEngineMultiplexer multiplexer methods must return a Deferred to a list
70 70 with length equal to the number of targets. The elements of the list will
71 71 correspond to the return of the corresponding IEngine method.
72 72
73 73 Failures are aggressive, meaning that if an action fails for any target,
74 74 the overall action will fail immediately with that Failure.
75 75
76 76 :Parameters:
77 77 targets : int, list of ints, or 'all'
78 78 Engine ids the action will apply to.
79 79
80 80 :Returns: Deferred to a list of results for each engine.
81 81
82 82 :Exception:
83 83 InvalidEngineID
84 84 If the targets argument is bad or engines aren't registered.
85 85 NoEnginesRegistered
86 86 If there are no engines registered and targets='all'
87 87 """
88 88
89 89 #---------------------------------------------------------------------------
90 90 # Multiplexed methods
91 91 #---------------------------------------------------------------------------
92 92
93 93 def execute(lines, targets='all'):
94 94 """Execute lines of Python code on targets.
95 95
96 96 See the class docstring for information about targets and possible
97 97 exceptions this method can raise.
98 98
99 99 :Parameters:
100 100 lines : str
101 101 String of python code to be executed on targets.
102 102 """
103 103
104 104 def push(namespace, targets='all'):
105 105 """Push dict namespace into the user's namespace on targets.
106 106
107 107 See the class docstring for information about targets and possible
108 108 exceptions this method can raise.
109 109
110 110 :Parameters:
111 111 namespace : dict
112 112 Dict of key/value pairs to be put into the user's namespace.
113 113 """
114 114
115 115 def pull(keys, targets='all'):
116 116 """Pull values out of the user's namespace on targets by keys.
117 117
118 118 See the class docstring for information about targets and possible
119 119 exceptions this method can raise.
120 120
121 121 :Parameters:
122 122 keys : tuple of strings
123 123 Sequence of keys to be pulled from user's namespace.
124 124 """
125 125
126 126 def push_function(namespace, targets='all'):
127 127 """"""
128 128
129 129 def pull_function(keys, targets='all'):
130 130 """"""
131 131
132 132 def get_result(i=None, targets='all'):
133 133 """Get the result for command i from targets.
134 134
135 135 See the class docstring for information about targets and possible
136 136 exceptions this method can raise.
137 137
138 138 :Parameters:
139 139 i : int or None
140 140 Command index or None to indicate most recent command.
141 141 """
142 142
143 143 def reset(targets='all'):
144 144 """Reset targets.
145 145
146 146 This clears the user's namespace on the Engines, but won't cause
147 147 modules to be reloaded.
148 148 """
149 149
150 150 def keys(targets='all'):
151 151 """Get variable names defined in user's namespace on targets."""
152 152
153 153 def kill(controller=False, targets='all'):
154 154 """Kill the targets Engines and possibly the controller.
155 155
156 156 :Parameters:
157 157 controller : boolean
158 158 Whether the controller should be killed as well. If so, all the
159 159 engines will be killed first, regardless of targets.
160 160 """
161 161
162 162 def push_serialized(namespace, targets='all'):
163 163 """Push a namespace of Serialized objects to targets.
164 164
165 165 :Parameters:
166 166 namespace : dict
167 167 A dict whose keys are the variable names and whose values
168 168 are serialized versions of the objects.
169 169 """
170 170
171 171 def pull_serialized(keys, targets='all'):
172 172 """Pull Serialized objects by keys from targets.
173 173
174 174 :Parameters:
175 175 keys : tuple of strings
176 176 Sequence of variable names to pull as serialized objects.
177 177 """
178 178
179 179 def clear_queue(targets='all'):
180 180 """Clear the queue of pending command for targets."""
181 181
182 182 def queue_status(targets='all'):
183 183 """Get the status of the queue on the targets."""
184 184
185 185 def set_properties(properties, targets='all'):
186 186 """set properties by key and value"""
187 187
188 188 def get_properties(keys=None, targets='all'):
189 189 """get a list of properties by `keys`, if no keys specified, get all"""
190 190
191 191 def del_properties(keys, targets='all'):
192 192 """delete properties by `keys`"""
193 193
194 194 def has_properties(keys, targets='all'):
195 195 """get a list of bool values for whether `properties` has `keys`"""
196 196
197 197 def clear_properties(targets='all'):
198 198 """clear the properties dict"""
199 199
200 200
201 201 class IMultiEngine(IEngineMultiplexer):
202 202 """A controller that exposes an explicit interface to all of its engines.
203 203
204 204 This is the primary interface for interactive usage.
205 205 """
206 206
207 207 def get_ids():
208 208 """Return list of currently registered ids.
209 209
210 210 :Returns: A Deferred to a list of registered engine ids.
211 211 """
212 212
213 213
214 214
215 215 #-------------------------------------------------------------------------------
216 216 # Implementation of the core MultiEngine classes
217 217 #-------------------------------------------------------------------------------
218 218
219 219 class MultiEngine(ControllerAdapterBase):
220 220 """The representation of a ControllerService as a IMultiEngine.
221 221
222 222 Although it is not implemented currently, this class would be where a
223 223 client/notification API is implemented. It could inherit from something
224 224 like results.NotifierParent and then use the notify method to send
225 225 notifications.
226 226 """
227 227
228 228 implements(IMultiEngine)
229 229
230 230 def __init__(self, controller):
231 231 ControllerAdapterBase.__init__(self, controller)
232 232
233 233 #---------------------------------------------------------------------------
234 234 # Helper methods
235 235 #---------------------------------------------------------------------------
236 236
237 237 def engineList(self, targets):
238 238 """Parse the targets argument into a list of valid engine objects.
239 239
240 240 :Parameters:
241 241 targets : int, list of ints or 'all'
242 242 The targets argument to be parsed.
243 243
244 244 :Returns: List of engine objects.
245 245
246 246 :Exception:
247 247 InvalidEngineID
248 248 If targets is not valid or if an engine is not registered.
249 249 """
250 250 if isinstance(targets, int):
251 251 if targets not in self.engines.keys():
252 252 log.msg("Engine with id %i is not registered" % targets)
253 253 raise error.InvalidEngineID("Engine with id %i is not registered" % targets)
254 254 else:
255 255 return [self.engines[targets]]
256 256 elif isinstance(targets, (list, tuple)):
257 257 for id in targets:
258 258 if id not in self.engines.keys():
259 259 log.msg("Engine with id %r is not registered" % id)
260 260 raise error.InvalidEngineID("Engine with id %r is not registered" % id)
261 261 return map(self.engines.get, targets)
262 262 elif targets == 'all':
263 263 eList = self.engines.values()
264 264 if len(eList) == 0:
265 265 msg = """There are no engines registered.
266 266 Check the logs in ~/.ipython/log if you think there should have been."""
267 267 raise error.NoEnginesRegistered(msg)
268 268 else:
269 269 return eList
270 270 else:
271 271 raise error.InvalidEngineID("targets argument is not an int, list of ints or 'all': %r"%targets)
272 272
273 273 def _performOnEngines(self, methodName, *args, **kwargs):
274 274 """Calls a method on engines and returns deferred to list of results.
275 275
276 276 :Parameters:
277 277 methodName : str
278 278 Name of the method to be called.
279 279 targets : int, list of ints, 'all'
280 280 The targets argument to be parsed into a list of engine objects.
281 281 args
282 282 The positional arguments to be passed to the engines.
283 283 kwargs
284 284 The keyword arguments passed to the method.
285 285
286 286 :Returns: List of deferreds to the results on each engine
287 287
288 288 :Exception:
289 289 InvalidEngineID
290 290 If the targets argument is bad in any way.
291 291 AttributeError
292 292 If the method doesn't exist on one of the engines.
293 293 """
294 294 targets = kwargs.pop('targets')
295 295 log.msg("Performing %s on %r" % (methodName, targets))
296 296 # log.msg("Performing %s(%r, %r) on %r" % (methodName, args, kwargs, targets))
297 297 # This will and should raise if targets is not valid!
298 298 engines = self.engineList(targets)
299 299 dList = []
300 300 for e in engines:
301 301 meth = getattr(e, methodName, None)
302 302 if meth is not None:
303 303 dList.append(meth(*args, **kwargs))
304 304 else:
305 305 raise AttributeError("Engine %i does not have method %s" % (e.id, methodName))
306 306 return dList
307 307
308 308 def _performOnEnginesAndGatherBoth(self, methodName, *args, **kwargs):
309 309 """Called _performOnEngines and wraps result/exception into deferred."""
310 310 try:
311 311 dList = self._performOnEngines(methodName, *args, **kwargs)
312 312 except (error.InvalidEngineID, AttributeError, KeyError, error.NoEnginesRegistered):
313 313 return defer.fail(failure.Failure())
314 314 else:
315 315 # Having fireOnOneErrback is causing problems with the determinacy
316 316 # of the system. Basically, once a single engine has errbacked, this
317 317 # method returns. In some cases, this will cause the client to submit
318 318 # another command. Because the previous command is still running
319 319 # on some engines, this command will be queued. When those commands
320 320 # then errback, the second command will raise QueueCleared. Ahhh!
321 321 d = gatherBoth(dList,
322 322 fireOnOneErrback=0,
323 323 consumeErrors=1,
324 324 logErrors=0)
325 325 d.addCallback(error.collect_exceptions, methodName)
326 326 return d
327 327
328 328 #---------------------------------------------------------------------------
329 329 # General IMultiEngine methods
330 330 #---------------------------------------------------------------------------
331 331
332 332 def get_ids(self):
333 333 return defer.succeed(self.engines.keys())
334 334
335 335 #---------------------------------------------------------------------------
336 336 # IEngineMultiplexer methods
337 337 #---------------------------------------------------------------------------
338 338
339 339 def execute(self, lines, targets='all'):
340 340 return self._performOnEnginesAndGatherBoth('execute', lines, targets=targets)
341 341
342 342 def push(self, ns, targets='all'):
343 343 return self._performOnEnginesAndGatherBoth('push', ns, targets=targets)
344 344
345 345 def pull(self, keys, targets='all'):
346 346 return self._performOnEnginesAndGatherBoth('pull', keys, targets=targets)
347 347
348 348 def push_function(self, ns, targets='all'):
349 349 return self._performOnEnginesAndGatherBoth('push_function', ns, targets=targets)
350 350
351 351 def pull_function(self, keys, targets='all'):
352 352 return self._performOnEnginesAndGatherBoth('pull_function', keys, targets=targets)
353 353
354 354 def get_result(self, i=None, targets='all'):
355 355 return self._performOnEnginesAndGatherBoth('get_result', i, targets=targets)
356 356
357 357 def reset(self, targets='all'):
358 358 return self._performOnEnginesAndGatherBoth('reset', targets=targets)
359 359
360 360 def keys(self, targets='all'):
361 361 return self._performOnEnginesAndGatherBoth('keys', targets=targets)
362 362
363 363 def kill(self, controller=False, targets='all'):
364 364 if controller:
365 365 targets = 'all'
366 366 d = self._performOnEnginesAndGatherBoth('kill', targets=targets)
367 367 if controller:
368 368 log.msg("Killing controller")
369 369 d.addCallback(lambda _: reactor.callLater(2.0, reactor.stop))
370 370 # Consume any weird stuff coming back
371 371 d.addBoth(lambda _: None)
372 372 return d
373 373
374 374 def push_serialized(self, namespace, targets='all'):
375 375 for k, v in namespace.iteritems():
376 376 log.msg("Pushed object %s is %f MB" % (k, v.getDataSize()))
377 377 d = self._performOnEnginesAndGatherBoth('push_serialized', namespace, targets=targets)
378 378 return d
379 379
380 380 def pull_serialized(self, keys, targets='all'):
381 381 try:
382 382 dList = self._performOnEngines('pull_serialized', keys, targets=targets)
383 383 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
384 384 return defer.fail(failure.Failure())
385 385 else:
386 386 for d in dList:
387 387 d.addCallback(self._logSizes)
388 388 d = gatherBoth(dList,
389 389 fireOnOneErrback=0,
390 390 consumeErrors=1,
391 391 logErrors=0)
392 392 d.addCallback(error.collect_exceptions, 'pull_serialized')
393 393 return d
394 394
395 395 def _logSizes(self, listOfSerialized):
396 396 if isinstance(listOfSerialized, (list, tuple)):
397 397 for s in listOfSerialized:
398 398 log.msg("Pulled object is %f MB" % s.getDataSize())
399 399 else:
400 400 log.msg("Pulled object is %f MB" % listOfSerialized.getDataSize())
401 401 return listOfSerialized
402 402
403 403 def clear_queue(self, targets='all'):
404 404 return self._performOnEnginesAndGatherBoth('clear_queue', targets=targets)
405 405
406 406 def queue_status(self, targets='all'):
407 407 log.msg("Getting queue status on %r" % targets)
408 408 try:
409 409 engines = self.engineList(targets)
410 410 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
411 411 return defer.fail(failure.Failure())
412 412 else:
413 413 dList = []
414 414 for e in engines:
415 415 dList.append(e.queue_status().addCallback(lambda s:(e.id, s)))
416 416 d = gatherBoth(dList,
417 417 fireOnOneErrback=0,
418 418 consumeErrors=1,
419 419 logErrors=0)
420 420 d.addCallback(error.collect_exceptions, 'queue_status')
421 421 return d
422 422
423 423 def get_properties(self, keys=None, targets='all'):
424 424 log.msg("Getting properties on %r" % targets)
425 425 try:
426 426 engines = self.engineList(targets)
427 427 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
428 428 return defer.fail(failure.Failure())
429 429 else:
430 430 dList = [e.get_properties(keys) for e in engines]
431 431 d = gatherBoth(dList,
432 432 fireOnOneErrback=0,
433 433 consumeErrors=1,
434 434 logErrors=0)
435 435 d.addCallback(error.collect_exceptions, 'get_properties')
436 436 return d
437 437
438 438 def set_properties(self, properties, targets='all'):
439 439 log.msg("Setting properties on %r" % targets)
440 440 try:
441 441 engines = self.engineList(targets)
442 442 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
443 443 return defer.fail(failure.Failure())
444 444 else:
445 445 dList = [e.set_properties(properties) for e in engines]
446 446 d = gatherBoth(dList,
447 447 fireOnOneErrback=0,
448 448 consumeErrors=1,
449 449 logErrors=0)
450 450 d.addCallback(error.collect_exceptions, 'set_properties')
451 451 return d
452 452
453 453 def has_properties(self, keys, targets='all'):
454 454 log.msg("Checking properties on %r" % targets)
455 455 try:
456 456 engines = self.engineList(targets)
457 457 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
458 458 return defer.fail(failure.Failure())
459 459 else:
460 460 dList = [e.has_properties(keys) for e in engines]
461 461 d = gatherBoth(dList,
462 462 fireOnOneErrback=0,
463 463 consumeErrors=1,
464 464 logErrors=0)
465 465 d.addCallback(error.collect_exceptions, 'has_properties')
466 466 return d
467 467
468 468 def del_properties(self, keys, targets='all'):
469 469 log.msg("Deleting properties on %r" % targets)
470 470 try:
471 471 engines = self.engineList(targets)
472 472 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
473 473 return defer.fail(failure.Failure())
474 474 else:
475 475 dList = [e.del_properties(keys) for e in engines]
476 476 d = gatherBoth(dList,
477 477 fireOnOneErrback=0,
478 478 consumeErrors=1,
479 479 logErrors=0)
480 480 d.addCallback(error.collect_exceptions, 'del_properties')
481 481 return d
482 482
483 483 def clear_properties(self, targets='all'):
484 484 log.msg("Clearing properties on %r" % targets)
485 485 try:
486 486 engines = self.engineList(targets)
487 487 except (error.InvalidEngineID, AttributeError, error.NoEnginesRegistered):
488 488 return defer.fail(failure.Failure())
489 489 else:
490 490 dList = [e.clear_properties() for e in engines]
491 491 d = gatherBoth(dList,
492 492 fireOnOneErrback=0,
493 493 consumeErrors=1,
494 494 logErrors=0)
495 495 d.addCallback(error.collect_exceptions, 'clear_properties')
496 496 return d
497 497
498 498
499 499 components.registerAdapter(MultiEngine,
500 500 IControllerBase,
501 501 IMultiEngine)
502 502
503 503
504 504 #-------------------------------------------------------------------------------
505 505 # Interfaces for the Synchronous MultiEngine
506 506 #-------------------------------------------------------------------------------
507 507
508 508 class ISynchronousEngineMultiplexer(Interface):
509 509 pass
510 510
511 511
512 512 class ISynchronousMultiEngine(ISynchronousEngineMultiplexer):
513 513 """Synchronous, two-phase version of IMultiEngine.
514 514
515 515 Methods in this interface are identical to those of IMultiEngine, but they
516 516 take one additional argument:
517 517
518 518 execute(lines, targets='all') -> execute(lines, targets='all', block=True)
519 519
520 520 :Parameters:
521 521 block : boolean
522 522 Should the method return a deferred to a deferredID or the
523 523 actual result. If block=False a deferred to a deferredID is
524 524 returned and the user must call `get_pending_deferred` at a later
525 525 point. If block=True, a deferred to the actual result comes back.
526 526 """
527 527 def get_pending_deferred(deferredID, block=True):
528 528 """"""
529 529
530 530 def clear_pending_deferreds():
531 531 """"""
532 532
533 533
534 534 #-------------------------------------------------------------------------------
535 535 # Implementation of the Synchronous MultiEngine
536 536 #-------------------------------------------------------------------------------
537 537
538 538 class SynchronousMultiEngine(PendingDeferredManager):
539 539 """Adapt an `IMultiEngine` -> `ISynchronousMultiEngine`
540 540
541 541 Warning, this class uses a decorator that currently uses **kwargs.
542 542 Because of this, block must be passed as a kwarg, not positionally.
543 543 """
544 544
545 545 implements(ISynchronousMultiEngine)
546 546
547 547 def __init__(self, multiengine):
548 548 self.multiengine = multiengine
549 549 PendingDeferredManager.__init__(self)
550 550
551 551 #---------------------------------------------------------------------------
552 552 # Decorated pending deferred methods
553 553 #---------------------------------------------------------------------------
554 554
555 555 @two_phase
556 556 def execute(self, lines, targets='all'):
557 557 d = self.multiengine.execute(lines, targets)
558 558 return d
559 559
560 560 @two_phase
561 561 def push(self, namespace, targets='all'):
562 562 return self.multiengine.push(namespace, targets)
563 563
564 564 @two_phase
565 565 def pull(self, keys, targets='all'):
566 566 d = self.multiengine.pull(keys, targets)
567 567 return d
568 568
569 569 @two_phase
570 570 def push_function(self, namespace, targets='all'):
571 571 return self.multiengine.push_function(namespace, targets)
572 572
573 573 @two_phase
574 574 def pull_function(self, keys, targets='all'):
575 575 d = self.multiengine.pull_function(keys, targets)
576 576 return d
577 577
578 578 @two_phase
579 579 def get_result(self, i=None, targets='all'):
580 580 return self.multiengine.get_result(i, targets='all')
581 581
582 582 @two_phase
583 583 def reset(self, targets='all'):
584 584 return self.multiengine.reset(targets)
585 585
586 586 @two_phase
587 587 def keys(self, targets='all'):
588 588 return self.multiengine.keys(targets)
589 589
590 590 @two_phase
591 591 def kill(self, controller=False, targets='all'):
592 592 return self.multiengine.kill(controller, targets)
593 593
594 594 @two_phase
595 595 def push_serialized(self, namespace, targets='all'):
596 596 return self.multiengine.push_serialized(namespace, targets)
597 597
598 598 @two_phase
599 599 def pull_serialized(self, keys, targets='all'):
600 600 return self.multiengine.pull_serialized(keys, targets)
601 601
602 602 @two_phase
603 603 def clear_queue(self, targets='all'):
604 604 return self.multiengine.clear_queue(targets)
605 605
606 606 @two_phase
607 607 def queue_status(self, targets='all'):
608 608 return self.multiengine.queue_status(targets)
609 609
610 610 @two_phase
611 611 def set_properties(self, properties, targets='all'):
612 612 return self.multiengine.set_properties(properties, targets)
613 613
614 614 @two_phase
615 615 def get_properties(self, keys=None, targets='all'):
616 616 return self.multiengine.get_properties(keys, targets)
617 617
618 618 @two_phase
619 619 def has_properties(self, keys, targets='all'):
620 620 return self.multiengine.has_properties(keys, targets)
621 621
622 622 @two_phase
623 623 def del_properties(self, keys, targets='all'):
624 624 return self.multiengine.del_properties(keys, targets)
625 625
626 626 @two_phase
627 627 def clear_properties(self, targets='all'):
628 628 return self.multiengine.clear_properties(targets)
629 629
630 630 #---------------------------------------------------------------------------
631 631 # IMultiEngine methods
632 632 #---------------------------------------------------------------------------
633 633
634 634 def get_ids(self):
635 635 """Return a list of registered engine ids.
636 636
637 637 Never use the two phase block/non-block stuff for this.
638 638 """
639 639 return self.multiengine.get_ids()
640 640
641 641
642 642 components.registerAdapter(SynchronousMultiEngine, IMultiEngine, ISynchronousMultiEngine)
643 643
644 644
645 645 #-------------------------------------------------------------------------------
646 646 # Various high-level interfaces that can be used as MultiEngine mix-ins
647 647 #-------------------------------------------------------------------------------
648 648
649 649 #-------------------------------------------------------------------------------
650 650 # IMultiEngineCoordinator
651 651 #-------------------------------------------------------------------------------
652 652
653 653 class IMultiEngineCoordinator(Interface):
654 654 """Methods that work on multiple engines explicitly."""
655 655
656 656 def scatter(key, seq, dist='b', flatten=False, targets='all'):
657 657 """Partition and distribute a sequence to targets."""
658 658
659 659 def gather(key, dist='b', targets='all'):
660 660 """Gather object key from targets."""
661 661
662 662 def raw_map(func, seqs, dist='b', targets='all'):
663 663 """
664 664 A parallelized version of Python's builtin `map` function.
665 665
666 666 This has a slightly different syntax than the builtin `map`.
667 667 This is needed because we need to have keyword arguments and thus
668 668 can't use *args to capture all the sequences. Instead, they must
669 669 be passed in a list or tuple.
670 670
671 671 The equivalence is:
672 672
673 673 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
674 674
675 675 Most users will want to use parallel functions or the `mapper`
676 676 and `map` methods for an API that follows that of the builtin
677 677 `map`.
678 678 """
679 679
680 680
681 681 class ISynchronousMultiEngineCoordinator(IMultiEngineCoordinator):
682 682 """Methods that work on multiple engines explicitly."""
683 683
684 684 def scatter(key, seq, dist='b', flatten=False, targets='all', block=True):
685 685 """Partition and distribute a sequence to targets."""
686 686
687 687 def gather(key, dist='b', targets='all', block=True):
688 688 """Gather object key from targets"""
689 689
690 690 def raw_map(func, seqs, dist='b', targets='all', block=True):
691 691 """
692 692 A parallelized version of Python's builtin map.
693 693
694 694 This has a slightly different syntax than the builtin `map`.
695 695 This is needed because we need to have keyword arguments and thus
696 696 can't use *args to capture all the sequences. Instead, they must
697 697 be passed in a list or tuple.
698 698
699 699 raw_map(func, seqs) -> map(func, seqs[0], seqs[1], ...)
700 700
701 701 Most users will want to use parallel functions or the `mapper`
702 702 and `map` methods for an API that follows that of the builtin
703 703 `map`.
704 704 """
705 705
706 706
707 707 #-------------------------------------------------------------------------------
708 708 # IMultiEngineExtras
709 709 #-------------------------------------------------------------------------------
710 710
711 711 class IMultiEngineExtras(Interface):
712 712
713 713 def zip_pull(targets, keys):
714 714 """
715 715 Pull, but return results in a different format from `pull`.
716 716
717 717 This method basically returns zip(pull(targets, *keys)), with a few
718 718 edge cases handled differently. Users of chainsaw will find this format
719 719 familiar.
720 720 """
721 721
722 722 def run(targets, fname):
723 723 """Run a .py file on targets."""
724 724
725 725
726 726 class ISynchronousMultiEngineExtras(IMultiEngineExtras):
727 727 def zip_pull(targets, keys, block=True):
728 728 """
729 729 Pull, but return results in a different format from `pull`.
730 730
731 731 This method basically returns zip(pull(targets, *keys)), with a few
732 732 edge cases handled differently. Users of chainsaw will find this format
733 733 familiar.
734 734 """
735 735
736 736 def run(targets, fname, block=True):
737 737 """Run a .py file on targets."""
738 738
739 739 #-------------------------------------------------------------------------------
740 740 # The full MultiEngine interface
741 741 #-------------------------------------------------------------------------------
742 742
743 743 class IFullMultiEngine(IMultiEngine,
744 744 IMultiEngineCoordinator,
745 745 IMultiEngineExtras):
746 746 pass
747 747
748 748
749 749 class IFullSynchronousMultiEngine(ISynchronousMultiEngine,
750 750 ISynchronousMultiEngineCoordinator,
751 751 ISynchronousMultiEngineExtras):
752 752 pass
753 753
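The engineList helper above is what turns the three accepted targets forms (a single int, a list of ints, or 'all') into concrete engine objects. A synchronous sketch of that normalisation, with a plain dict standing in for the engine registry and ValueError standing in for InvalidEngineID/NoEnginesRegistered:

def resolve_targets(engines, targets='all'):
    """Return the engine objects selected by `targets`.

    engines : dict mapping engine id -> engine object
    targets : int, list/tuple of ints, or 'all'
    """
    if isinstance(targets, int):
        targets = [targets]
    if targets == 'all':
        if not engines:
            raise ValueError("no engines registered")
        return list(engines.values())
    if isinstance(targets, (list, tuple)):
        missing = [t for t in targets if t not in engines]
        if missing:
            raise ValueError("unregistered engine ids: %r" % missing)
        return [engines[t] for t in targets]
    raise ValueError("targets must be an int, list of ints, or 'all': %r" % (targets,))

# Three fake engines indexed by id:
engines = {0: 'e0', 1: 'e1', 2: 'e2'}
assert resolve_targets(engines, 1) == ['e1']
assert resolve_targets(engines, [0, 2]) == ['e0', 'e2']
assert len(resolve_targets(engines, 'all')) == 3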
@@ -1,178 +1,178 b''
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.kernel.test.test_pendingdeferred -*-
3 3
4 4 """Classes to manage pending Deferreds.
5 5
6 6 A pending deferred is a deferred that may or may not have fired. This module
7 7 is useful for taking a class whose methods return deferreds and wrapping it to
8 8 provide an API that keeps track of those deferreds for later retrieval. See the
9 9 tests for examples of its usage.
10 10 """
11 11
12 12 __docformat__ = "restructuredtext en"
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Copyright (C) 2008 The IPython Development Team
16 16 #
17 17 # Distributed under the terms of the BSD License. The full license is in
18 18 # the file COPYING, distributed as part of this software.
19 19 #-------------------------------------------------------------------------------
20 20
21 21 #-------------------------------------------------------------------------------
22 22 # Imports
23 23 #-------------------------------------------------------------------------------
24 24
25 25 from twisted.application import service
26 26 from twisted.internet import defer, reactor
27 27 from twisted.python import log, components, failure
28 28 from zope.interface import Interface, implements, Attribute
29 29
30 30 from IPython.kernel.twistedutil import gatherBoth
31 31 from IPython.kernel import error
32 32 from IPython.external import guid
33 from IPython.tools import growl
33 from IPython.utils import growl
34 34
35 35 class PendingDeferredManager(object):
36 36 """A class to track pending deferreds.
37 37
38 38 To track a pending deferred, the user of this class must first
39 39 get a deferredID by calling `get_deferred_id`. Then the user
40 40 calls `save_pending_deferred` passing that id and the deferred to
41 41 be tracked. To later retrieve it, the user calls
42 42 `get_pending_deferred` passing the id.
43 43 """
44 44
45 45 def __init__(self):
46 46 """Manage pending deferreds."""
47 47
48 48 self.results = {} # Populated when results are ready
49 49 self.deferred_ids = [] # List of deferred ids I am managing
50 50 self.deferreds_to_callback = {} # dict of lists of deferreds to callback
51 51
52 52 def get_deferred_id(self):
53 53 return guid.generate()
54 54
55 55 def quick_has_id(self, deferred_id):
56 56 return deferred_id in self.deferred_ids
57 57
58 58 def _save_result(self, result, deferred_id):
59 59 if self.quick_has_id(deferred_id):
60 60 self.results[deferred_id] = result
61 61 self._trigger_callbacks(deferred_id)
62 62
63 63 def _trigger_callbacks(self, deferred_id):
64 64 # Go through and call the waiting callbacks
65 65 result = self.results.get(deferred_id)
66 66 if result is not None: # Only trigger if there is a result
67 67 try:
68 68 d = self.deferreds_to_callback.pop(deferred_id)
69 69 except KeyError:
70 70 d = None
71 71 if d is not None:
72 72 if isinstance(result, failure.Failure):
73 73 d.errback(result)
74 74 else:
75 75 d.callback(result)
76 76 self.delete_pending_deferred(deferred_id)
77 77
78 78 def save_pending_deferred(self, d, deferred_id=None):
79 79 """Save the result of a deferred for later retrieval.
80 80
81 81 This works even if the deferred has not fired.
82 82
83 83 Only callbacks and errbacks applied to d before this method
84 84 is called will be called on the final result.
85 85 """
86 86 if deferred_id is None:
87 87 deferred_id = self.get_deferred_id()
88 88 self.deferred_ids.append(deferred_id)
89 89 d.addBoth(self._save_result, deferred_id)
90 90 return deferred_id
91 91
92 92 def _protected_del(self, key, container):
93 93 try:
94 94 del container[key]
95 95 except Exception:
96 96 pass
97 97
98 98 def delete_pending_deferred(self, deferred_id):
99 99 """Remove a deferred I am tracking and add a null Errback.
100 100
101 101 :Parameters:
102 102 deferred_id : str
103 103 The id of a deferred that I am tracking.
104 104 """
105 105 if self.quick_has_id(deferred_id):
106 106 # First go through and errback any deferreds that are still waiting
107 107 d = self.deferreds_to_callback.get(deferred_id)
108 108 if d is not None:
109 109 d.errback(failure.Failure(error.AbortedPendingDeferredError("pending deferred has been deleted: %r"%deferred_id)))
110 110 # Now delete all references to this deferred_id
111 111 ind = self.deferred_ids.index(deferred_id)
112 112 self._protected_del(ind, self.deferred_ids)
113 113 self._protected_del(deferred_id, self.deferreds_to_callback)
114 114 self._protected_del(deferred_id, self.results)
115 115 else:
116 116 raise error.InvalidDeferredID('invalid deferred_id: %r' % deferred_id)
117 117
118 118 def clear_pending_deferreds(self):
119 119 """Remove all the deferreds I am tracking."""
120 120 for did in list(self.deferred_ids):
121 121 self.delete_pending_deferred(did)
122 122
123 123 def _delete_and_pass_through(self, r, deferred_id):
124 124 self.delete_pending_deferred(deferred_id)
125 125 return r
126 126
127 127 def get_pending_deferred(self, deferred_id, block):
128 128 if not self.quick_has_id(deferred_id) or self.deferreds_to_callback.get(deferred_id) is not None:
129 129 return defer.fail(failure.Failure(error.InvalidDeferredID('invalid deferred_id: %r' % deferred_id)))
130 130 result = self.results.get(deferred_id)
131 131 if result is not None:
132 132 self.delete_pending_deferred(deferred_id)
133 133 if isinstance(result, failure.Failure):
134 134 return defer.fail(result)
135 135 else:
136 136 return defer.succeed(result)
137 137 else: # Result is not ready
138 138 if block:
139 139 d = defer.Deferred()
140 140 self.deferreds_to_callback[deferred_id] = d
141 141 return d
142 142 else:
143 143 return defer.fail(failure.Failure(error.ResultNotCompleted("result not completed: %r" % deferred_id)))
144 144
145 145 def two_phase(wrapped_method):
146 146 """Wrap methods that return a deferred into a two phase process.
147 147
148 148 This transforms::
149 149
150 150 foo(arg1, arg2, ...) -> foo(arg1, arg2,...,block=True).
151 151
152 152 The wrapped method will then return a deferred to a deferred id. This will
153 153 only work on methods of classes that inherit from `PendingDeferredManager`,
154 154 as that class provides the API for saving and retrieving pending deferreds.
155 155
156 156 block is a boolean to determine if we should use the two phase process or
157 157 simply call the wrapped method. At this point block does not have a
158 158 default and it probably won't.
159 159 """
160 160
161 161 def wrapper_two_phase(pdm, *args, **kwargs):
162 162 try:
163 163 block = kwargs.pop('block')
164 164 except KeyError:
165 165 block = True # The default if not specified
166 166 if block:
167 167 return wrapped_method(pdm, *args, **kwargs)
168 168 else:
169 169 d = wrapped_method(pdm, *args, **kwargs)
170 170 deferred_id=pdm.save_pending_deferred(d)
171 171 return defer.succeed(deferred_id)
172 172
173 173 return wrapper_two_phase
174 174
175 175
176 176
177 177
178 178
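The two_phase decorator and PendingDeferredManager above combine into the block=True/False protocol used by the synchronous multiengine. A sketch of that protocol on a toy class; the Adder class is hypothetical, while PendingDeferredManager, two_phase and get_pending_deferred are the objects defined in this module:

# Illustrative only: a toy PendingDeferredManager subclass whose method
# returns a deferred, wrapped with the two_phase decorator from above.
from twisted.internet import defer
from IPython.kernel.pendingdeferred import PendingDeferredManager, two_phase

class Adder(PendingDeferredManager):
    @two_phase
    def add(self, a, b):
        return defer.succeed(a + b)

adder = Adder()

# block=True: the returned deferred fires with the result itself.
d_result = adder.add(1, 2, block=True)

# block=False: the returned deferred fires with a deferred_id, which is
# redeemed later through get_pending_deferred.
def redeem(deferred_id):
    return adder.get_pending_deferred(deferred_id, block=True)

d_id = adder.add(3, 4, block=False)
d_id.addCallback(redeem)                   # eventually fires with 7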
@@ -1,416 +1,416 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """The IPython controller."""
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 # Python looks for an empty string at the beginning of sys.path to enable
20 20 # importing from the cwd.
21 21 import sys
22 22 sys.path.insert(0, '')
23 23
24 24 from optparse import OptionParser
25 25 import os
26 26 import time
27 27 import tempfile
28 28
29 29 from twisted.application import internet, service
30 30 from twisted.internet import reactor, error, defer
31 31 from twisted.python import log
32 32
33 33 from IPython.kernel.fcutil import Tub, UnauthenticatedTub, have_crypto
34 34
35 # from IPython.tools import growl
35 # from IPython.utils import growl
36 36 # growl.start("IPython1 Controller")
37 37
38 38 from IPython.kernel.error import SecurityError
39 39 from IPython.kernel import controllerservice
40 40 from IPython.kernel.fcutil import check_furl_file_security
41 41
42 42 # Create various ipython directories if they don't exist.
43 43 # This must be done before IPython.kernel.config is imported.
44 44 from IPython.core.iplib import user_setup
45 45 from IPython.utils.genutils import get_ipython_dir, get_log_dir, get_security_dir
46 46 if os.name == 'posix':
47 47 rc_suffix = ''
48 48 else:
49 49 rc_suffix = '.ini'
50 50 user_setup(get_ipython_dir(), rc_suffix, mode='install', interactive=False)
51 51 get_log_dir()
52 52 get_security_dir()
53 53
54 54 from IPython.kernel.config import config_manager as kernel_config_manager
55 55 from IPython.config.cutils import import_item
56 56
57 57
58 58 #-------------------------------------------------------------------------------
59 59 # Code
60 60 #-------------------------------------------------------------------------------
61 61
62 62 def get_temp_furlfile(filename):
63 63 return tempfile.mktemp(dir=os.path.dirname(filename),
64 64 prefix=os.path.basename(filename))
65 65
66 66 def make_tub(ip, port, secure, cert_file):
67 67 """
68 68 Create a listening tub given an ip, port, and cert_file location.
69 69
70 70 :Parameters:
71 71 ip : str
72 72 The IP address that the tub should listen on. Empty means all interfaces.
73 73 port : int
74 74 The port that the tub should listen on. A value of 0 means
75 75 pick a random port
76 76 secure: boolean
77 77 Will the connection be secure (in the foolscap sense)
78 78 cert_file:
79 79 The filename of a file to be used for the SSL certificate.
80 80 """
81 81 if secure:
82 82 if have_crypto:
83 83 tub = Tub(certFile=cert_file)
84 84 else:
85 85 raise SecurityError("""
86 86 OpenSSL/pyOpenSSL is not available, so we can't run in secure mode.
87 87 Try running without security using 'ipcontroller -xy'.
88 88 """)
89 89 else:
90 90 tub = UnauthenticatedTub()
91 91
92 92 # Set the strport based on the ip and port and start listening
93 93 if ip == '':
94 94 strport = "tcp:%i" % port
95 95 else:
96 96 strport = "tcp:%i:interface=%s" % (port, ip)
97 97 listener = tub.listenOn(strport)
98 98
99 99 return tub, listener
100 100
101 101 def make_client_service(controller_service, config):
102 102 """
103 103 Create a service that will listen for clients.
104 104
105 105 This service is simply a `foolscap.Tub` instance that has a set of Referenceables
106 106 registered with it.
107 107 """
108 108
109 109 # Now create the foolscap tub
110 110 ip = config['controller']['client_tub']['ip']
111 111 port = config['controller']['client_tub'].as_int('port')
112 112 location = config['controller']['client_tub']['location']
113 113 secure = config['controller']['client_tub']['secure']
114 114 cert_file = config['controller']['client_tub']['cert_file']
115 115 client_tub, client_listener = make_tub(ip, port, secure, cert_file)
116 116
117 117 # Set the location in the trivial case of localhost
118 118 if ip == 'localhost' or ip == '127.0.0.1':
119 119 location = "127.0.0.1"
120 120
121 121 if not secure:
122 122 log.msg("WARNING: you are running the controller with no client security")
123 123
124 124 def set_location_and_register():
125 125 """Set the location for the tub and return a deferred."""
126 126
127 127 def register(empty, ref, furl_file):
128 128 # We create and then move to make sure that when the file
129 129 # appears to other processes, the buffer has been flushed
130 130 # and the file has been closed.
131 131 temp_furl_file = get_temp_furlfile(furl_file)
132 132 client_tub.registerReference(ref, furlFile=temp_furl_file)
133 133 os.rename(temp_furl_file, furl_file)
134 134
135 135 if location == '':
136 136 d = client_tub.setLocationAutomatically()
137 137 else:
138 138 d = defer.maybeDeferred(client_tub.setLocation, "%s:%i" % (location, client_listener.getPortnum()))
139 139
140 140 for ciname, ci in config['controller']['controller_interfaces'].iteritems():
141 141 log.msg("Adapting Controller to interface: %s" % ciname)
142 142 furl_file = ci['furl_file']
143 143 log.msg("Saving furl for interface [%s] to file: %s" % (ciname, furl_file))
144 144 check_furl_file_security(furl_file, secure)
145 145 adapted_controller = import_item(ci['controller_interface'])(controller_service)
146 146 d.addCallback(register, import_item(ci['fc_interface'])(adapted_controller),
147 147 furl_file=ci['furl_file'])
148 148
149 149 reactor.callWhenRunning(set_location_and_register)
150 150 return client_tub
151 151
152 152
153 153 def make_engine_service(controller_service, config):
154 154 """
155 155 Create a service that will listen for engines.
156 156
157 157 This service is simply a `foolscap.Tub` instance that has a set of Referenceables
158 158 registered with it.
159 159 """
160 160
161 161 # Now create the foolscap tub
162 162 ip = config['controller']['engine_tub']['ip']
163 163 port = config['controller']['engine_tub'].as_int('port')
164 164 location = config['controller']['engine_tub']['location']
165 165 secure = config['controller']['engine_tub']['secure']
166 166 cert_file = config['controller']['engine_tub']['cert_file']
167 167 engine_tub, engine_listener = make_tub(ip, port, secure, cert_file)
168 168
169 169 # Set the location in the trivial case of localhost
170 170 if ip == 'localhost' or ip == '127.0.0.1':
171 171 location = "127.0.0.1"
172 172
173 173 if not secure:
174 174 log.msg("WARNING: you are running the controller with no engine security")
175 175
176 176 def set_location_and_register():
177 177 """Set the location for the tub and return a deferred."""
178 178
179 179 def register(empty, ref, furl_file):
180 180 # We create and then move to make sure that when the file
181 181 # appears to other processes, the buffer has been flushed
182 182 # and the file has been closed.
183 183 temp_furl_file = get_temp_furlfile(furl_file)
184 184 engine_tub.registerReference(ref, furlFile=temp_furl_file)
185 185 os.rename(temp_furl_file, furl_file)
186 186
187 187 if location == '':
188 188 d = engine_tub.setLocationAutomatically()
189 189 else:
190 190 d = defer.maybeDeferred(engine_tub.setLocation, "%s:%i" % (location, engine_listener.getPortnum()))
191 191
192 192 furl_file = config['controller']['engine_furl_file']
193 193 engine_fc_interface = import_item(config['controller']['engine_fc_interface'])
194 194 log.msg("Saving furl for the engine to file: %s" % furl_file)
195 195 check_furl_file_security(furl_file, secure)
196 196 fc_controller = engine_fc_interface(controller_service)
197 197 d.addCallback(register, fc_controller, furl_file=furl_file)
198 198
199 199 reactor.callWhenRunning(set_location_and_register)
200 200 return engine_tub
201 201
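As the docstring above says, each of these services is simply a foolscap Tub with Referenceables registered on it. A hedged sketch of that pattern, independent of make_tub and the real controller interfaces (class name, certificate file and port are illustrative; in newer foolscap releases the imports live in foolscap.api):

    from foolscap import Tub, Referenceable

    class Echo(Referenceable):
        # The controller registers adapted controller interfaces here;
        # this trivial object only shows the remote_* calling convention.
        def remote_echo(self, x):
            return x

    tub = Tub(certFile='controller.pem')   # an unauthenticated Tub when security is off
    listener = tub.listenOn('tcp:10105')
    tub.setLocation('127.0.0.1:10105')
    tub.startService()
    furl = tub.registerReference(Echo())   # furlFile=... would also write the FURL to disk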
202 202 def start_controller():
203 203 """
204 204 Start the controller by creating the service hierarchy and starting the reactor.
205 205
206 206 This method does the following:
207 207
208 208 * It starts the controller logging
209 209 * It executes any user-defined import statement for the controller
210 210 * It creates 2 `foolscap.Tub` instances for the client and the engines
211 211 and registers `foolscap.Referenceables` with the tubs to expose the
212 212 controller to engines and clients.
213 213 """
214 214 config = kernel_config_manager.get_config_obj()
215 215
216 216 # Start logging
217 217 logfile = config['controller']['logfile']
218 218 if logfile:
219 219 logfile = logfile + str(os.getpid()) + '.log'
220 220 try:
221 221 openLogFile = open(logfile, 'w')
222 222 except:
223 223 openLogFile = sys.stdout
224 224 else:
225 225 openLogFile = sys.stdout
226 226 log.startLogging(openLogFile)
227 227
228 228 # Execute any user defined import statements
229 229 cis = config['controller']['import_statement']
230 230 if cis:
231 231 try:
232 232 exec cis in globals(), locals()
233 233 except:
234 234 log.msg("Error running import_statement: %s" % cis)
235 235
236 236 # Delete old furl files unless reuse_furls is set
237 237 reuse = config['controller']['reuse_furls']
238 238 if not reuse:
239 239 paths = (config['controller']['engine_furl_file'],
240 240 config['controller']['controller_interfaces']['task']['furl_file'],
241 241 config['controller']['controller_interfaces']['multiengine']['furl_file']
242 242 )
243 243 for p in paths:
244 244 if os.path.isfile(p):
245 245 os.remove(p)
246 246
247 247 # Create the service hierarchy
248 248 main_service = service.MultiService()
249 249 # The controller service
250 250 controller_service = controllerservice.ControllerService()
251 251 controller_service.setServiceParent(main_service)
252 252 # The client tub and all its referenceables
253 253 client_service = make_client_service(controller_service, config)
254 254 client_service.setServiceParent(main_service)
255 255 # The engine tub
256 256 engine_service = make_engine_service(controller_service, config)
257 257 engine_service.setServiceParent(main_service)
258 258 # Start the controller service and set things running
259 259 main_service.startService()
260 260 reactor.run()
261 261
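The service hierarchy built in start_controller follows the usual Twisted pattern of parenting child services to a MultiService so a single startService() call starts everything. A stripped-down sketch of just that pattern (names are illustrative):

    from twisted.application import service

    root = service.MultiService()
    child = service.Service()
    child.setServiceParent(root)   # register child under root
    root.startService()            # starts root and every child service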
262 262 def init_config():
263 263 """
264 264 Initialize the configuration using default and command line options.
265 265 """
266 266
267 267 parser = OptionParser("""ipcontroller [options]
268 268
269 269 Start an IPython controller.
270 270
271 271 Use the IPYTHONDIR environment variable to change your IPython directory
272 272 from the default of .ipython or _ipython. The log and security
273 273 subdirectories of your IPython directory will be used by this script
274 274 for log files and security files.""")
275 275
276 276 # Client related options
277 277 parser.add_option(
278 278 "--client-ip",
279 279 type="string",
280 280 dest="client_ip",
281 281 help="the IP address or hostname the controller will listen on for client connections"
282 282 )
283 283 parser.add_option(
284 284 "--client-port",
285 285 type="int",
286 286 dest="client_port",
287 287 help="the port the controller will listen on for client connections"
288 288 )
289 289 parser.add_option(
290 290 '--client-location',
291 291 type="string",
292 292 dest="client_location",
293 293 help="hostname or ip for clients to connect to"
294 294 )
295 295 parser.add_option(
296 296 "-x",
297 297 action="store_false",
298 298 dest="client_secure",
299 299 help="turn off all client security"
300 300 )
301 301 parser.add_option(
302 302 '--client-cert-file',
303 303 type="string",
304 304 dest="client_cert_file",
305 305 help="file to store the client SSL certificate"
306 306 )
307 307 parser.add_option(
308 308 '--task-furl-file',
309 309 type="string",
310 310 dest="task_furl_file",
311 311 help="file to store the FURL for task clients to connect with"
312 312 )
313 313 parser.add_option(
314 314 '--multiengine-furl-file',
315 315 type="string",
316 316 dest="multiengine_furl_file",
317 317 help="file to store the FURL for multiengine clients to connect with"
318 318 )
319 319 # Engine related options
320 320 parser.add_option(
321 321 "--engine-ip",
322 322 type="string",
323 323 dest="engine_ip",
324 324 help="the IP address or hostname the controller will listen on for engine connections"
325 325 )
326 326 parser.add_option(
327 327 "--engine-port",
328 328 type="int",
329 329 dest="engine_port",
330 330 help="the port the controller will listen on for engine connections"
331 331 )
332 332 parser.add_option(
333 333 '--engine-location',
334 334 type="string",
335 335 dest="engine_location",
336 336 help="hostname or ip for engines to connect to"
337 337 )
338 338 parser.add_option(
339 339 "-y",
340 340 action="store_false",
341 341 dest="engine_secure",
342 342 help="turn off all engine security"
343 343 )
344 344 parser.add_option(
345 345 '--engine-cert-file',
346 346 type="string",
347 347 dest="engine_cert_file",
348 348 help="file to store the engine SSL certificate"
349 349 )
350 350 parser.add_option(
351 351 '--engine-furl-file',
352 352 type="string",
353 353 dest="engine_furl_file",
354 354 help="file to store the FURL for engines to connect with"
355 355 )
356 356 parser.add_option(
357 357 "-l", "--logfile",
358 358 type="string",
359 359 dest="logfile",
360 360 help="log file name (default is stdout)"
361 361 )
362 362 parser.add_option(
363 363 "-r",
364 364 action="store_true",
365 365 dest="reuse_furls",
366 366 help="try to reuse all furl files"
367 367 )
368 368
369 369 (options, args) = parser.parse_args()
370 370
371 371 config = kernel_config_manager.get_config_obj()
372 372
373 373 # Update with command line options
374 374 if options.client_ip is not None:
375 375 config['controller']['client_tub']['ip'] = options.client_ip
376 376 if options.client_port is not None:
377 377 config['controller']['client_tub']['port'] = options.client_port
378 378 if options.client_location is not None:
379 379 config['controller']['client_tub']['location'] = options.client_location
380 380 if options.client_secure is not None:
381 381 config['controller']['client_tub']['secure'] = options.client_secure
382 382 if options.client_cert_file is not None:
383 383 config['controller']['client_tub']['cert_file'] = options.client_cert_file
384 384 if options.task_furl_file is not None:
385 385 config['controller']['controller_interfaces']['task']['furl_file'] = options.task_furl_file
386 386 if options.multiengine_furl_file is not None:
387 387 config['controller']['controller_interfaces']['multiengine']['furl_file'] = options.multiengine_furl_file
388 388 if options.engine_ip is not None:
389 389 config['controller']['engine_tub']['ip'] = options.engine_ip
390 390 if options.engine_port is not None:
391 391 config['controller']['engine_tub']['port'] = options.engine_port
392 392 if options.engine_location is not None:
393 393 config['controller']['engine_tub']['location'] = options.engine_location
394 394 if options.engine_secure is not None:
395 395 config['controller']['engine_tub']['secure'] = options.engine_secure
396 396 if options.engine_cert_file is not None:
397 397 config['controller']['engine_tub']['cert_file'] = options.engine_cert_file
398 398 if options.engine_furl_file is not None:
399 399 config['controller']['engine_furl_file'] = options.engine_furl_file
400 400 if options.reuse_furls is not None:
401 401 config['controller']['reuse_furls'] = options.reuse_furls
402 402
403 403 if options.logfile is not None:
404 404 config['controller']['logfile'] = options.logfile
405 405
406 406 kernel_config_manager.update_config_obj(config)
407 407
408 408 def main():
409 409 """
410 410 After creating the configuration information, start the controller.
411 411 """
412 412 init_config()
413 413 start_controller()
414 414
415 415 if __name__ == "__main__":
416 416 main()
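Once the controller has written its FURL files, a client process obtains a remote reference by handing the FURL to a Tub of its own. A hedged sketch of that flow in plain foolscap (the filename is illustrative; IPython's client classes wrap this up for you):

    from twisted.internet import reactor
    from foolscap import Tub

    def connected(remote):
        # 'remote' is a RemoteReference to whatever the controller registered
        print 'got reference:', remote
        reactor.stop()

    tub = Tub()
    tub.startService()
    furl = open('ipcontroller-mec.furl').read().strip()
    tub.getReference(furl).addCallback(connected)
    reactor.run()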
1 NO CONTENT: file renamed from scripts/iptest to IPython/scripts/iptest
@@ -1,28 +1,28 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """IPython -- An enhanced Interactive Python
4 4
5 5 This is just the startup wrapper script, kept deliberately to a minimum.
6 6
7 7 The shell's mainloop() takes an optional argument, sys_exit (default=0). If
8 8 set to 1, it calls sys.exit() at exit time. You can use the following code in
9 9 your PYTHONSTARTUP file:
10 10
11 11 import IPython
12 12 IPython.Shell.IPShell().mainloop(sys_exit=1)
13 13
14 14 [or simply IPython.Shell.IPShell().mainloop(1) ]
15 15
16 16 and IPython will be your working environment when you start python. The final
17 17 sys.exit() call will make python exit transparently when IPython finishes, so
18 18 you don't have an extra prompt to get out of.
19 19
20 20 This is probably useful to developers who manage multiple Python versions and
21 21 don't want to have correspondingly multiple IPython versions. Note that in
22 22 this mode, there is no way to pass IPython any command-line options, as those
23 23 are trapped first by Python itself.
24 24 """
25 25
26 import IPython.Shell
26 import IPython.core.shell
27 27
28 IPython.Shell.start().mainloop()
28 IPython.core.shell.start().mainloop()
1 NO CONTENT: file renamed from scripts/ipython-wx to IPython/scripts/ipython-wx
1 NO CONTENT: file renamed from scripts/ipython_win_post_install.py to IPython/scripts/ipython_win_post_install.py
1 NO CONTENT: file renamed from scripts/ipythonx to IPython/scripts/ipythonx
@@ -1,9 +1,9 b''
1 1 #!/usr/bin/env python
2 2
3 3 """Thin wrapper around the IPython irunner module.
4 4
5 5 Run with --help for details, or see the irunner source."""
6 6
7 from IPython import irunner
7 from IPython.lib import irunner
8 8
9 9 irunner.main()
@@ -1,6 +1,6 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """Simple wrapper around PyColorize, which colorizes python sources."""
4 4
5 import IPython.PyColorize
6 IPython.PyColorize.main()
5 import IPython.utils.PyColorize
6 IPython.utils.PyColorize.main()
@@ -1,32 +1,36 b''
1 1 include ipython.py
2 2 include setupbase.py
3 3 include setupegg.py
4 4
5 graft scripts
6
7 5 graft setupext
8 6
9 graft IPython/UserConfig
10
11 7 graft IPython/kernel
12 8 graft IPython/config
9 graft IPython/core
10 graft IPython/deathrow
11 graft IPython/external
12 graft IPython/frontend
13 graft IPython/gui
14 graft IPython/lib
15 graft IPython/quarantine
16 graft IPython/scripts
13 17 graft IPython/testing
14 graft IPython/tools
18 graft IPython/utils
15 19
16 20 recursive-include IPython/Extensions igrid_help*
17 21
18 22 graft docs
19 23 exclude docs/\#*
20 24 exclude docs/man/*.1
21 25
22 26 # docs subdirs we want to skip
23 27 prune docs/attic
24 28 prune docs/build
25 29
26 30 global-exclude *~
27 31 global-exclude *.flc
28 32 global-exclude *.pyc
29 33 global-exclude .dircopy.log
30 34 global-exclude .svn
31 35 global-exclude .bzr
32 36 global-exclude .hgignore
@@ -1,191 +1,191 b''
1 1 # -*- coding: utf-8 -*-
2 2 #
3 3 # IPython documentation build configuration file.
4 4
5 5 # NOTE: This file has been edited manually from the auto-generated one from
6 6 # sphinx. Do NOT delete and re-generate. If any changes from sphinx are
7 7 # needed, generate a scratch one and merge by hand any new fields needed.
8 8
9 9 #
10 10 # This file is execfile()d with the current directory set to its containing dir.
11 11 #
12 12 # The contents of this file are pickled, so don't put values in the namespace
13 13 # that aren't pickleable (module imports are okay, they're removed automatically).
14 14 #
15 15 # All configuration values have a default value; values that are commented out
16 16 # serve to show the default value.
17 17
18 18 import sys, os
19 19
20 20 # If your extensions are in another directory, add it here. If the directory
21 21 # is relative to the documentation root, use os.path.abspath to make it
22 22 # absolute, like shown here.
23 23 sys.path.append(os.path.abspath('../sphinxext'))
24 24
25 25 # Import support for ipython console session syntax highlighting (lives
26 26 # in the sphinxext directory defined above)
27 27 import ipython_console_highlighting
28 28
29 29 # We load the ipython release info into a dict by explicit execution
30 30 iprelease = {}
31 execfile('../../IPython/Release.py',iprelease)
31 execfile('../../IPython/core/release.py',iprelease)
32 32
33 33 # General configuration
34 34 # ---------------------
35 35
36 36 # Add any Sphinx extension module names here, as strings. They can be extensions
37 37 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
38 38 extensions = ['sphinx.ext.autodoc',
39 39 'sphinx.ext.doctest',
40 40
41 41 'only_directives',
42 42 'inheritance_diagram',
43 43 'ipython_console_highlighting',
44 44 # 'plot_directive', # disabled for now, needs matplotlib
45 45 'numpydoc', # to preprocess docstrings
46 46 ]
47 47
48 48 # Add any paths that contain templates here, relative to this directory.
49 49 templates_path = ['_templates']
50 50
51 51 # The suffix of source filenames.
52 52 source_suffix = '.txt'
53 53
54 54 # The master toctree document.
55 55 master_doc = 'index'
56 56
57 57 # General substitutions.
58 58 project = 'IPython'
59 59 copyright = '2008, The IPython Development Team'
60 60
61 61 # The default replacements for |version| and |release|, also used in various
62 62 # other places throughout the built documents.
63 63 #
64 64 # The full version, including alpha/beta/rc tags.
65 65 release = iprelease['version']
66 66 # The short X.Y version.
67 67 version = '.'.join(release.split('.',2)[:2])
68 68
69 69
70 70 # There are two options for replacing |today|: either, you set today to some
71 71 # non-false value, then it is used:
72 72 #today = ''
73 73 # Else, today_fmt is used as the format for a strftime call.
74 74 today_fmt = '%B %d, %Y'
75 75
76 76 # List of documents that shouldn't be included in the build.
77 77 #unused_docs = []
78 78
79 79 # List of directories, relative to source directories, that shouldn't be searched
80 80 # for source files.
81 81 exclude_dirs = ['attic']
82 82
83 83 # If true, '()' will be appended to :func: etc. cross-reference text.
84 84 #add_function_parentheses = True
85 85
86 86 # If true, the current module name will be prepended to all description
87 87 # unit titles (such as .. function::).
88 88 #add_module_names = True
89 89
90 90 # If true, sectionauthor and moduleauthor directives will be shown in the
91 91 # output. They are ignored by default.
92 92 #show_authors = False
93 93
94 94 # The name of the Pygments (syntax highlighting) style to use.
95 95 pygments_style = 'sphinx'
96 96
97 97
98 98 # Options for HTML output
99 99 # -----------------------
100 100
101 101 # The style sheet to use for HTML and HTML Help pages. A file of that name
102 102 # must exist either in Sphinx' static/ path, or in one of the custom paths
103 103 # given in html_static_path.
104 104 html_style = 'default.css'
105 105
106 106 # The name for this set of Sphinx documents. If None, it defaults to
107 107 # "<project> v<release> documentation".
108 108 #html_title = None
109 109
110 110 # The name of an image file (within the static path) to place at the top of
111 111 # the sidebar.
112 112 #html_logo = None
113 113
114 114 # Add any paths that contain custom static files (such as style sheets) here,
115 115 # relative to this directory. They are copied after the builtin static files,
116 116 # so a file named "default.css" will overwrite the builtin "default.css".
117 117 html_static_path = ['_static']
118 118
119 119 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
120 120 # using the given strftime format.
121 121 html_last_updated_fmt = '%b %d, %Y'
122 122
123 123 # If true, SmartyPants will be used to convert quotes and dashes to
124 124 # typographically correct entities.
125 125 #html_use_smartypants = True
126 126
127 127 # Custom sidebar templates, maps document names to template names.
128 128 #html_sidebars = {}
129 129
130 130 # Additional templates that should be rendered to pages, maps page names to
131 131 # template names.
132 132 #html_additional_pages = {}
133 133
134 134 # If false, no module index is generated.
135 135 #html_use_modindex = True
136 136
137 137 # If true, the reST sources are included in the HTML build as _sources/<name>.
138 138 #html_copy_source = True
139 139
140 140 # If true, an OpenSearch description file will be output, and all pages will
141 141 # contain a <link> tag referring to it. The value of this option must be the
142 142 # base URL from which the finished HTML is served.
143 143 #html_use_opensearch = ''
144 144
145 145 # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
146 146 #html_file_suffix = ''
147 147
148 148 # Output file base name for HTML help builder.
149 149 htmlhelp_basename = 'ipythondoc'
150 150
151 151
152 152 # Options for LaTeX output
153 153 # ------------------------
154 154
155 155 # The paper size ('letter' or 'a4').
156 156 latex_paper_size = 'letter'
157 157
158 158 # The font size ('10pt', '11pt' or '12pt').
159 159 latex_font_size = '11pt'
160 160
161 161 # Grouping the document tree into LaTeX files. List of tuples
162 162 # (source start file, target name, title, author, document class [howto/manual]).
163 163
164 164 latex_documents = [ ('index', 'ipython.tex', 'IPython Documentation',
165 165 ur"""The IPython Development Team""",
166 166 'manual'),
167 167 ]
168 168
169 169 # The name of an image file (relative to this directory) to place at the top of
170 170 # the title page.
171 171 #latex_logo = None
172 172
173 173 # For "manual" documents, if this is true, then toplevel headings are parts,
174 174 # not chapters.
175 175 #latex_use_parts = False
176 176
177 177 # Additional stuff for the LaTeX preamble.
178 178 #latex_preamble = ''
179 179
180 180 # Documents to append as an appendix to all manuals.
181 181 #latex_appendices = []
182 182
183 183 # If false, no module index is generated.
184 184 #latex_use_modindex = True
185 185
186 186
187 187 # Cleanup
188 188 # -------
189 189 # delete release info to avoid pickling errors from sphinx
190 190
191 191 del iprelease
@@ -1,261 +1,266 b''
1 1 =============================
2 2 IPython module reorganization
3 3 =============================
4 4
5 5 Currently, IPython has many top-level modules that serve many different purposes.
6 6 The lack of organization makes it very difficult for developers to work on IPython
7 7 and understand its design. This document contains notes about how we will reorganize
8 8 the modules into sub-packages.
9 9
10 10 .. warning::
11 11
12 12 This effort will possibly break third party packages that use IPython as
13 13 a library or hack on the IPython internals.
14 14
15 15 .. warning::
16 16
17 17 This effort will result in the removal from IPython of certain modules
18 18 that are not used anymore, don't currently work, are unmaintained, etc.
19 19
20 20
21 21 Current subpackages
22 22 ===================
23 23
24 24 IPython currently has the following sub-packages:
25 25
26 26 * :mod:`IPython.config`
27 27
28 28 * :mod:`IPython.Extensions`
29 29
30 30 * :mod:`IPython.external`
31 31
32 32 * :mod:`IPython.frontend`
33 33
34 34 * :mod:`IPython.gui`
35 35
36 36 * :mod:`IPython.kernel`
37 37
38 38 * :mod:`IPython.testing`
39 39
40 40 * :mod:`IPython.tests`
41 41
42 42 * :mod:`IPython.tools`
43 43
44 44 * :mod:`IPython.UserConfig`
45 45
46 46 New Subpackages to be created
47 47 =============================
48 48
49 49 We propose to create the following new sub-packages:
50 50
51 51 * :mod:`IPython.core`. This sub-package will contain the core of the IPython
52 52 interpreter, but none of its extended capabilities.
53 53
54 54 * :mod:`IPython.lib`. IPython has many extended capabilities that are not part
55 55 of the IPython core. These things will go here. Any better names than
56 56 :mod:`IPython.lib`?
57 57
58 58 * :mod:`IPython.utils`. This sub-package will contain anything that might
59 59 eventually be found in the Python standard library, like things in
60 60 :mod:`genutils`. Each sub-module in this sub-package should contain
61 61 functions and classes that serve a single purpose.
62 62
63 63 * :mod:`IPython.deathrow`. This is for code that is untested and/or rotting
64 64 and needs to be removed from IPython. Eventually all this code will either
65 65 1) be revived by someone willing to maintain it with tests and docs and
66 66 re-included into IPython or 2) be removed from IPython proper, but put into
67 67 a separate top-level (not IPython) package that we keep around. No new code
68 68 will be allowed here.
69 69
70 70 * :mod:`IPython.quarantine`. This is for code that doesn't meet IPython's
71 71 standards, but that we plan on keeping. To be moved out of this sub-package
72 72 a module needs to have a maintainer, tests and documentation.
73 73
74 74 Procedure
75 75 =========
76 76
77 77 1. Move the file to its new location with its new name.
78 78 2. Rename all import statements to reflect the change.
79 79 3. Run PyFlakes on each changed module.
80 80 4. Add tests/test_imports.py to test it (see the sketch below).
81 81
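A per-package :file:`tests/test_imports.py` can stay as small as the following sketch (the module names are examples; every moved module gets one such trivial import test, which nose collects automatically)::

    def test_import_release():
        import IPython.core.release

    def test_import_genutils():
        import IPython.utils.genutils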
82 82 Need to modify iptests to properly skip modules that are no longer top
83 83 level modules.
84 84
85 85 Need to update the top level IPython/__init__.py file.
86 86
87 Need to get installation working correctly.
88
89 When running python setup.py sdist, the Sphinx API docs fail to build because
90 of something going on with IPython.core.fakemodule
91
87 92 Where things will be moved
88 93 ==========================
89 94
90 95 Top-level modules:
91 96
92 97 * :file:`background_jobs.py`. Move to :file:`IPython/lib/backgroundjobs.py`.
93 98
94 99 * :file:`ColorANSI.py`. Move to :file:`IPython/utils/coloransi.py`.
95 100
96 101 * :file:`completer.py`. Move to :file:`IPython/core/completer.py`.
97 102
98 103 * :file:`ConfigLoader.py`. Move to :file:`IPython/config/configloader.py`.
99 104
100 105 * :file:`CrashHandler.py`. Move to :file:`IPython/core/crashhandler`.
101 106
102 107 * :file:`Debugger.py`. Move to :file:`IPython/core/debugger.py`.
103 108
104 109 * :file:`deep_reload.py`. Move to :file:`IPython/lib/deepreload.py`.
105 110
106 111 * :file:`demo.py`. Move to :file:`IPython/lib/demo.py`.
107 112
108 113 * :file:`DPyGetOpt.py`. Move to :mod:`IPython.utils` and replace with newer options parser.
109 114
110 115 * :file:`dtutils.py`. Move to :file:`IPython.deathrow`.
111 116
112 117 * :file:`excolors.py`. Move to :file:`IPython.core` or :file:`IPython.config`.
113 118 Maybe move to :mod:`IPython.lib` or :mod:`IPython.python`?
114 119
115 120 * :file:`FakeModule.py`. Move to :file:`IPython/core/fakemodule.py`.
116 121
117 122 * :file:`generics.py`. Move to :file:`IPython.python`.
118 123
119 124 * :file:`genutils.py`. Move to :file:`IPython.utils`.
120 125
121 126 * :file:`Gnuplot2.py`. Move to :file:`IPython.sandbox`.
122 127
123 128 * :file:`GnuplotInteractive.py`. Move to :file:`IPython.sandbox`.
124 129
125 130 * :file:`GnuplotRuntime.py`. Move to :file:`IPython.sandbox`.
126 131
127 132 * :file:`numutils.py`. Move to :file:`IPython.sandbox`.
128 133
129 134 * :file:`twshell.py`. Move to :file:`IPython.sandbox`.
130 135
131 136 * :file:`Extensions`. This needs to be gone through separately. Minimally,
132 137 the package should be renamed to :file:`extensions`.
133 138
134 139 * :file:`history.py`. Move to :file:`IPython.core`.
135 140
136 141 * :file:`hooks.py`. Move to :file:`IPython.core`.
137 142
138 143 * :file:`ipapi.py`. Move to :file:`IPython.core`.
139 144
140 145 * :file:`iplib.py`. Move to :file:`IPython.core`.
141 146
142 147 * :file:`ipmaker.py`: Move to :file:`IPython.core`.
143 148
144 149 * :file:`ipstruct.py`. Move to :file:`IPython.python`.
145 150
146 151 * :file:`irunner.py`. Move to :file:`IPython.scripts`. ???
147 152
148 153 * :file:`Itpl.py`. Move to :file:`deathrow/Itpl.py`. Copy already in
149 154 :file:`IPython.external`.
150 155
151 156 * :file:`Logger.py`. Move to :file:`IPython/core/logger.py`.
152 157
153 158 * :file:`macro.py`. Move to :file:`IPython.core`.
154 159
155 160 * :file:`Magic.py`. Move to :file:`IPython/core/magic.py`.
156 161
157 162 * :file:`OInspect.py`. Move to :file:`IPython/core/oinspect.py`.
158 163
159 164 * :file:`OutputTrap.py`. Move to :file:`IPython/core/outputtrap.py`.
160 165
161 166 * :file:`platutils.py`. Move to :file:`IPython.python`.
162 167
163 168 * :file:`platutils_dummy.py`. Move to :file:`IPython.python`.
164 169
165 170 * :file:`platutils_posix.py`. Move to :file:`IPython.python`.
166 171
167 172 * :file:`platutils_win32.py`. Move to :file:`IPython.python`.
168 173
169 174 * :file:`prefilter.py`: Move to :file:`IPython.core`.
170 175
171 176 * :file:`Prompts.py`. Move to :file:`IPython/core/prompts.py` or
172 177 :file:`IPython/frontend/prompts.py`.
173 178
174 179 * :file:`PyColorize.py`. Replace with pygments? If not, move to
175 180 :file:`IPython/core/pycolorize.py`. Maybe move to :mod:`IPython.lib` or
176 181 :mod:`IPython.python`?
177 182
178 183 * :file:`Release.py`. Move to ??? or remove?
179 184
180 185 * :file:`rlineimpl.py`. Move to :file:`IPython.core`.
181 186
182 187 * :file:`shadowns.py`. Move to :file:`IPython.core`.
183 188
184 189 * :file:`Shell.py`. Move to :file:`IPython.core.shell.py` or
185 190 :file:`IPython/frontend/shell.py`.
186 191
187 192 * :file:`shellglobals.py`. Move to :file:`IPython.core`.
188 193
189 194 * :file:`strdispatch.py`. Move to :file:`IPython.python`.
190 195
191 196 * :file:`twshell.py`. Move to :file:`IPython.sandbox`.
192 197
193 198 * :file:`ultraTB.py`. Move to :file:`IPython/core/ultratb.py`.
194 199
195 200 * :file:`upgrade_dir.py`. Move to :file:`IPython/utils/upgradedir.py`.
196 201
197 202 * :file:`usage.py`. Move to :file:`IPython.core`.
198 203
199 204 * :file:`wildcard.py`. Move to :file:`IPython.utils`.
200 205
201 206 * :file:`winconsole.py`. Move to :file:`IPython.utils`.
202 207
203 208 Top-level sub-packages:
204 209
205 210 * :file:`testing`. Good where it is.
206 211
207 212 * :file:`tests`. Remove.
208 213
209 214 * :file:`tools`. Things in here need to be looked at and moved elsewhere like
210 215 :file:`IPython.utils`.
211 216
212 217 * :file:`UserConfig`. Move to :file:`IPython.config.userconfig`.
213 218
214 219 * :file:`config`. Good where it is!
215 220
216 221 * :file:`external`. Good where it is!
217 222
218 223 * :file:`frontend`. Good where it is!
219 224
220 225 * :file:`gui`. Eventually this should be moved to a subdir of
221 226 :file:`IPython.frontend`.
222 227
223 228 * :file:`kernel`. Good where it is.
224 229
225 230
226 231
227 232
228 233
229 234
230 235
231 236
232 237
233 238
234 239
235 240
236 241
237 242
238 243
239 244
240 245
241 246 Other things
242 247 ============
243 248
244 249 When these files are moved around, a number of other things will happen at the same time:
245 250
246 251 1. Test files will be created for each module in IPython. Minimally, all
247 252 modules will be imported as a part of the test. This will serve as a
248 253 test of the module reorganization. These tests will be put into new
249 254 :file:`tests` subdirectories that each package will have.
250 255
251 256 2. PyFlakes and other code checkers will be run to look for problems.
252 257
253 258 3. Modules will be renamed to comply with PEP 8 naming conventions: all
254 259 lowercase and no special characters like ``-`` or ``_``.
255 260
256 261 4. Existing tests will be moved to the appropriate :file:`tests`
257 262 subdirectories.
258 263
259 264
260 265
261 266
@@ -1,11 +1,11 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """IPython -- An enhanced Interactive Python
4 4
5 5 The actual ipython script to be installed with 'python setup.py install' is
6 6 in './scripts' directory. This file is here (ipython source root directory)
7 7 to facilitate non-root 'zero-installation' (just copy the source tree
8 8 somewhere and run ipython.py) and development. """
9 9
10 import IPython.Shell
11 IPython.Shell.start().mainloop()
10 import IPython.core.shell
11 IPython.core.shell.start().mainloop()
@@ -1,189 +1,190 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """Setup script for IPython.
4 4
5 5 Under Posix environments it works like a typical setup.py script.
6 6 Under Windows, the command sdist is not supported, since IPython
7 7 requires utilities which are not available under Windows."""
8 8
9 9 #-------------------------------------------------------------------------------
10 10 # Copyright (C) 2008 The IPython Development Team
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #-------------------------------------------------------------------------------
15 15
16 16 #-------------------------------------------------------------------------------
17 17 # Imports
18 18 #-------------------------------------------------------------------------------
19 19
20 20 # Stdlib imports
21 21 import os
22 22 import sys
23 23
24 24 from glob import glob
25 25
26 26 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
27 27 # update it when the contents of directories change.
28 28 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
29 29
30 30 from distutils.core import setup
31 31
32 # Local imports
33 32 from IPython.utils.genutils import target_update
34 33
35 34 from setupbase import (
36 35 setup_args,
37 36 find_packages,
38 37 find_package_data,
39 38 find_scripts,
40 39 find_data_files,
41 40 check_for_dependencies
42 41 )
43 42
44 43 isfile = os.path.isfile
44 pjoin = os.path.join
45 45
46 46 #-------------------------------------------------------------------------------
47 47 # Handle OS specific things
48 48 #-------------------------------------------------------------------------------
49 49
50 50 if os.name == 'posix':
51 51 os_name = 'posix'
52 52 elif os.name in ['nt','dos']:
53 53 os_name = 'windows'
54 54 else:
55 55 print 'Unsupported operating system:',os.name
56 56 sys.exit(1)
57 57
58 58 # Under Windows, 'sdist' has not been supported. Now that the docs build with
59 59 # Sphinx it might work, but let's not turn it on until someone confirms that it
60 60 # actually works.
61 61 if os_name == 'windows' and 'sdist' in sys.argv:
62 62 print 'The sdist command is not available under Windows. Exiting.'
63 63 sys.exit(1)
64 64
65 65 #-------------------------------------------------------------------------------
66 66 # Things related to the IPython documentation
67 67 #-------------------------------------------------------------------------------
68 68
69 69 # update the manuals when building a source dist
70 70 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
71 71 import textwrap
72 72
73 73 # List of things to be updated. Each entry is a triplet of args for
74 74 # target_update()
75 75 to_update = [
76 76 # FIXME - Disabled for now: we need to redo an automatic way
77 77 # of generating the magic info inside the rst.
78 78 #('docs/magic.tex',
79 79 #['IPython/Magic.py'],
80 80 #"cd doc && ./update_magic.sh" ),
81 81
82 82 ('docs/man/ipython.1.gz',
83 83 ['docs/man/ipython.1'],
84 84 "cd docs/man && gzip -9c ipython.1 > ipython.1.gz"),
85 85
86 86 ('docs/man/pycolor.1.gz',
87 87 ['docs/man/pycolor.1'],
88 88 "cd docs/man && gzip -9c pycolor.1 > pycolor.1.gz"),
89 89 ]
90 90
91 91 # Only build the docs if sphinx is present
92 92 try:
93 93 import sphinx
94 94 except ImportError:
95 95 pass
96 96 else:
97 97 # The Makefile calls the do_sphinx scripts to build html and pdf, so
98 98 # just one target is enough to cover all manual generation
99 99
100 100 # First, compute all the dependencies that can force us to rebuild the
101 101 # docs. Start with the main release file that contains metadata
102 docdeps = ['IPython/Release.py']
102 docdeps = ['IPython/core/release.py']
103 103 # Include all the reST sources
104 104 pjoin = os.path.join
105 105 for dirpath,dirnames,filenames in os.walk('docs/source'):
106 106 if dirpath in ['_static','_templates']:
107 107 continue
108 108 docdeps += [ pjoin(dirpath,f) for f in filenames
109 109 if f.endswith('.txt') ]
110 110 # and the examples
111 111 for dirpath,dirnames,filenames in os.walk('docs/example'):
112 112 docdeps += [ pjoin(dirpath,f) for f in filenames
113 113 if not f.endswith('~') ]
114 114 # then, make them all dependencies for the main PDF (the html will get
115 115 # auto-generated as well).
116 116 to_update.append(
117 117 ('docs/dist/ipython.pdf',
118 118 docdeps,
119 119 "cd docs && make dist")
120 120 )
121 121
122 122 [ target_update(*t) for t in to_update ]
123 123
124 124
125 125 #---------------------------------------------------------------------------
126 126 # Find all the packages, package data, scripts and data_files
127 127 #---------------------------------------------------------------------------
128 128
129 129 packages = find_packages()
130 130 package_data = find_package_data()
131 131 scripts = find_scripts()
132 132 data_files = find_data_files()
133 133
134 134 #---------------------------------------------------------------------------
135 135 # Handle dependencies and setuptools specific things
136 136 #---------------------------------------------------------------------------
137 137
138 138 # This dict is used for passing extra arguments that are setuptools
139 139 # specific to setup
140 140 setuptools_extra_args = {}
141 141
142 142 if 'setuptools' in sys.modules:
143 143 setuptools_extra_args['zip_safe'] = False
144 144 setuptools_extra_args['entry_points'] = {
145 145 'console_scripts': [
146 146 'ipython = IPython.core.ipapi:launch_new_instance',
147 'pycolor = IPython.PyColorize:main',
147 'pycolor = IPython.utils.PyColorize:main',
148 148 'ipcontroller = IPython.kernel.scripts.ipcontroller:main',
149 149 'ipengine = IPython.kernel.scripts.ipengine:main',
150 150 'ipcluster = IPython.kernel.scripts.ipcluster:main',
151 151 'ipythonx = IPython.frontend.wx.ipythonx:main',
152 152 'iptest = IPython.testing.iptest:main',
153 'irunner = IPython.lib.irunner:main'
153 154 ]
154 155 }
155 156 setup_args['extras_require'] = dict(
156 157 kernel = [
157 158 'zope.interface>=3.4.1',
158 159 'Twisted>=8.0.1',
159 160 'foolscap>=0.2.6'
160 161 ],
161 162 doc='Sphinx>=0.3',
162 163 test='nose>=0.10.1',
163 164 security='pyOpenSSL>=0.6'
164 165 )
165 166 # Allow setuptools to handle the scripts
166 167 scripts = []
167 168 else:
168 169 # package_data of setuptools was introduced to distutils in 2.4
169 cfgfiles = filter(isfile, glob('IPython/UserConfig/*'))
170 cfgfiles = filter(isfile, glob(pjoin('IPython','config','userconfig','*')))
170 171 if sys.version_info < (2,4):
171 data_files.append(('lib', 'IPython/UserConfig', cfgfiles))
172 data_files.append(('lib', pjoin('IPython','config','userconfig'), cfgfiles))
172 173 # If we are running without setuptools, call this function which will
173 174 # check for dependencies and inform the user what is needed. This is
174 175 # just to make life easy for users.
175 176 check_for_dependencies()
176 177
177 178
178 179 #---------------------------------------------------------------------------
179 180 # Do the actual setup now
180 181 #---------------------------------------------------------------------------
181 182
182 183 setup_args['packages'] = packages
183 184 setup_args['package_data'] = package_data
184 185 setup_args['scripts'] = scripts
185 186 setup_args['data_files'] = data_files
186 187 setup_args.update(setuptools_extra_args)
187 188
188 189 if __name__ == '__main__':
189 190 setup(**setup_args)
@@ -1,279 +1,296 b''
1 1 # encoding: utf-8
2 2
3 3 """
4 4 This module defines the things that are used in setup.py for building IPython
5 5
6 6 This includes:
7 7
8 8 * The basic arguments to setup
9 9 * Functions for finding things like packages, package data, etc.
10 10 * A function for checking dependencies.
11 11 """
12 12
13 13 __docformat__ = "restructuredtext en"
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Copyright (C) 2008 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-------------------------------------------------------------------------------
21 21
22 22 #-------------------------------------------------------------------------------
23 23 # Imports
24 24 #-------------------------------------------------------------------------------
25 25
26 26 import os, sys
27 27
28 28 from glob import glob
29 29
30 30 from setupext import install_data_ext
31 31
32 32 #-------------------------------------------------------------------------------
33 33 # Useful globals and utility functions
34 34 #-------------------------------------------------------------------------------
35 35
36 36 # A few handy globals
37 37 isfile = os.path.isfile
38 38 pjoin = os.path.join
39 39
40 40 def oscmd(s):
41 41 print ">", s
42 42 os.system(s)
43 43
44 44 # A little utility we'll need below, since glob() does NOT allow you to do
45 45 # exclusion on multiple endings!
46 46 def file_doesnt_endwith(test,endings):
47 47 """Return true if test is a file and its name does NOT end with any
48 48 of the strings listed in endings."""
49 49 if not isfile(test):
50 50 return False
51 51 for e in endings:
52 52 if test.endswith(e):
53 53 return False
54 54 return True
55 55
56 56 #---------------------------------------------------------------------------
57 57 # Basic project information
58 58 #---------------------------------------------------------------------------
59 59
60 60 # Release.py contains version, authors, license, url, keywords, etc.
61 execfile(pjoin('IPython','Release.py'))
61 execfile(pjoin('IPython','core','release.py'))
62 62
63 63 # Create a dict with the basic information
64 64 # This dict is eventually passed to setup after additional keys are added.
65 65 setup_args = dict(
66 66 name = name,
67 67 version = version,
68 68 description = description,
69 69 long_description = long_description,
70 70 author = author,
71 71 author_email = author_email,
72 72 url = url,
73 73 download_url = download_url,
74 74 license = license,
75 75 platforms = platforms,
76 76 keywords = keywords,
77 77 cmdclass = {'install_data': install_data_ext},
78 78 )
79 79
80 80
81 81 #---------------------------------------------------------------------------
82 82 # Find packages
83 83 #---------------------------------------------------------------------------
84 84
85 85 def add_package(packages,pname,config=False,tests=False,scripts=False,
86 86 others=None):
87 87 """
88 88 Add a package to the list of packages, including certain subpackages.
89 89 """
90 90 packages.append('.'.join(['IPython',pname]))
91 91 if config:
92 92 packages.append('.'.join(['IPython',pname,'config']))
93 93 if tests:
94 94 packages.append('.'.join(['IPython',pname,'tests']))
95 95 if scripts:
96 96 packages.append('.'.join(['IPython',pname,'scripts']))
97 97 if others is not None:
98 98 for o in others:
99 99 packages.append('.'.join(['IPython',pname,o]))
100 100
101 101 def find_packages():
102 102 """
103 103 Find all of IPython's packages.
104 104 """
105 105 packages = ['IPython']
106 106 add_package(packages, 'config', tests=True)
107 add_package(packages, 'config.userconfig')
108 add_package(packages, 'core', tests=True)
109 add_package(packages, 'deathrow', tests=True)
107 110 add_package(packages , 'Extensions')
108 111 add_package(packages, 'external')
109 add_package(packages, 'gui')
110 add_package(packages, 'gui.wx')
111 112 add_package(packages, 'frontend', tests=True)
113 # Don't include the cocoa frontend for now as it is not stable
114 if sys.platform == 'darwin' and False:
115 add_package(packages, 'frontend.cocoa', tests=True, others=['plugin'])
116 add_package(packages, 'frontend.cocoa.examples')
117 add_package(packages, 'frontend.cocoa.examples.IPython1Sandbox')
118 add_package(packages, 'frontend.cocoa.examples.IPython1Sandbox.English.lproj')
112 119 add_package(packages, 'frontend.process')
113 120 add_package(packages, 'frontend.wx')
114 add_package(packages, 'frontend.cocoa', tests=True)
121 add_package(packages, 'gui')
122 add_package(packages, 'gui.wx')
115 123 add_package(packages, 'kernel', config=True, tests=True, scripts=True)
116 124 add_package(packages, 'kernel.core', config=True, tests=True)
125 add_package(packages, 'lib', tests=True)
126 add_package(packages, 'quarantine', tests=True)
127 add_package(packages, 'scripts')
117 128 add_package(packages, 'testing', tests=True)
118 add_package(packages, 'tests')
119 129 add_package(packages, 'testing.plugin', tests=False)
120 add_package(packages, 'tools', tests=True)
121 add_package(packages, 'UserConfig')
130 add_package(packages, 'utils', tests=True)
122 131 return packages
123 132
124 133 #---------------------------------------------------------------------------
125 134 # Find package data
126 135 #---------------------------------------------------------------------------
127 136
128 137 def find_package_data():
129 138 """
130 139 Find IPython's package_data.
131 140 """
132 141 # This is not enough for these things to appear in an sdist.
133 142 # We need to muck with the MANIFEST to get this to work
134 143 package_data = {
135 'IPython.UserConfig' : ['*'],
136 'IPython.tools.tests' : ['*.txt'],
144 'IPython.config.userconfig' : ['*'],
137 145 'IPython.testing' : ['*.txt']
138 146 }
139 147 return package_data
140 148
141 149
142 150 #---------------------------------------------------------------------------
143 151 # Find data files
144 152 #---------------------------------------------------------------------------
145 153
146 154 def make_dir_struct(tag,base,out_base):
147 155 """Make the directory structure of all files below a starting dir.
148 156
149 157 This is just a convenience routine to help build a nested directory
150 158 hierarchy because distutils is too stupid to do this by itself.
151 159
152 160 XXX - this needs a proper docstring!
153 161 """
154 162
155 163 # we'll use these a lot below
156 164 lbase = len(base)
157 165 pathsep = os.path.sep
158 166 lpathsep = len(pathsep)
159 167
160 168 out = []
161 169 for (dirpath,dirnames,filenames) in os.walk(base):
162 170 # we need to strip out the dirpath from the base to map it to the
163 171 # output (installation) path. This requires possibly stripping the
164 172 # path separator, because otherwise pjoin will not work correctly
165 173 # (pjoin('foo/','/bar') returns '/bar').
166 174
167 175 dp_eff = dirpath[lbase:]
168 176 if dp_eff.startswith(pathsep):
169 177 dp_eff = dp_eff[lpathsep:]
170 178 # The output path must be anchored at the out_base marker
171 179 out_path = pjoin(out_base,dp_eff)
172 180 # Now we can generate the final filenames. Since os.walk only produces
173 181 # filenames, we must join back with the dirpath to get full valid file
174 182 # paths:
175 183 pfiles = [pjoin(dirpath,f) for f in filenames]
176 184 # Finally, generate the entry we need, which is a triple of (tag,output
177 185 # path, files) for use as a data_files parameter in install_data.
178 186 out.append((tag,out_path,pfiles))
179 187
180 188 return out
181 189
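For orientation, a hedged illustration of the shape this helper returns (the paths are hypothetical): each entry is a (tag, install_dir, [source files]) triple, which is exactly what the install_data command consumes.

    sample = make_dir_struct('data', 'docs/examples',
                             'share/doc/ipython/examples')
    # Each entry in 'sample' is a ('data', install_dir, [source files]) triple,
    # e.g. ('data', 'share/doc/ipython/examples/kernel',
    #       ['docs/examples/kernel/task1.py', ...])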
182 190
183 191 def find_data_files():
184 192 """
185 193 Find IPython's data_files.
186 194
187 195 Most of these are docs.
188 196 """
189 197
190 docdirbase = 'share/doc/ipython'
191 manpagebase = 'share/man/man1'
192
198 docdirbase = pjoin('share', 'doc', 'ipython')
199 manpagebase = pjoin('share', 'man', 'man1')
200
193 201 # Simple file lists can be made by hand
194 manpages = filter(isfile, glob('docs/man/*.1.gz'))
195 igridhelpfiles = filter(isfile, glob('IPython/Extensions/igrid_help.*'))
202 manpages = filter(isfile, glob(pjoin('docs','man','*.1.gz')))
203 igridhelpfiles = filter(isfile, glob(pjoin('IPython','Extensions','igrid_help.*')))
196 204
197 205 # For nested structures, use the utility above
198 example_files = make_dir_struct('data','docs/examples',
199 pjoin(docdirbase,'examples'))
200 manual_files = make_dir_struct('data','docs/dist',pjoin(docdirbase,'manual'))
206 example_files = make_dir_struct(
207 'data',
208 pjoin('docs','examples'),
209 pjoin(docdirbase,'examples')
210 )
211 manual_files = make_dir_struct(
212 'data',
213 pjoin('docs','dist'),
214 pjoin(docdirbase,'manual')
215 )
201 216
202 217 # And assemble the entire output list
203 218 data_files = [ ('data',manpagebase, manpages),
204 219 ('data',pjoin(docdirbase,'extensions'),igridhelpfiles),
205 220 ] + manual_files + example_files
206 221
207 222 ## import pprint # dbg
208 223 ## print '*'*80
209 224 ## print 'data files'
210 225 ## pprint.pprint(data_files)
211 226 ## print '*'*80
212 227
213 228 return data_files
214 229
215 230 #---------------------------------------------------------------------------
216 231 # Find scripts
217 232 #---------------------------------------------------------------------------
218 233
219 234 def find_scripts():
220 235 """
221 236 Find IPython's scripts.
222 237 """
223 scripts = ['IPython/kernel/scripts/ipengine',
224 'IPython/kernel/scripts/ipcontroller',
225 'IPython/kernel/scripts/ipcluster',
226 'scripts/ipython',
227 'scripts/ipythonx',
228 'scripts/ipython-wx',
229 'scripts/pycolor',
230 'scripts/irunner',
231 'scripts/iptest',
232 ]
238 kernel_scripts = pjoin('IPython','kernel','scripts')
239 main_scripts = pjoin('IPython','scripts')
240 scripts = [pjoin(kernel_scripts, 'ipengine'),
241 pjoin(kernel_scripts, 'ipcontroller'),
242 pjoin(kernel_scripts, 'ipcluster'),
243 pjoin(main_scripts, 'ipython'),
244 pjoin(main_scripts, 'ipythonx'),
245 pjoin(main_scripts, 'ipython-wx'),
246 pjoin(main_scripts, 'pycolor'),
247 pjoin(main_scripts, 'irunner'),
248 pjoin(main_scripts, 'iptest')
249 ]
233 250
234 251 # Script to be run by the windows binary installer after the default setup
235 252 # routine, to add shortcuts and similar windows-only things. Windows
236 253 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
237 254 # doesn't find them.
238 255 if 'bdist_wininst' in sys.argv:
239 256 if len(sys.argv) > 2 and ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
240 257 print >> sys.stderr,"ERROR: bdist_wininst must be run alone. Exiting."
241 258 sys.exit(1)
242 scripts.append('scripts/ipython_win_post_install.py')
259 scripts.append(pjoin(main_scripts,'ipython_win_post_install.py'))
243 260
244 261 return scripts
245 262
246 263 #---------------------------------------------------------------------------
247 264 # Verify all dependencies
248 265 #---------------------------------------------------------------------------
249 266
250 267 def check_for_dependencies():
251 268 """Check for IPython's dependencies.
252 269
253 270 This function should NOT be called if running under setuptools!
254 271 """
255 272 from setupext.setupext import (
256 273 print_line, print_raw, print_status, print_message,
257 274 check_for_zopeinterface, check_for_twisted,
258 275 check_for_foolscap, check_for_pyopenssl,
259 276 check_for_sphinx, check_for_pygments,
260 277 check_for_nose, check_for_pexpect
261 278 )
262 279 print_line()
263 280 print_raw("BUILDING IPYTHON")
264 281 print_status('python', sys.version)
265 282 print_status('platform', sys.platform)
266 283 if sys.platform == 'win32':
267 284 print_status('Windows version', sys.getwindowsversion())
268 285
269 286 print_raw("")
270 287 print_raw("OPTIONAL DEPENDENCIES")
271 288
272 289 check_for_zopeinterface()
273 290 check_for_twisted()
274 291 check_for_foolscap()
275 292 check_for_pyopenssl()
276 293 check_for_sphinx()
277 294 check_for_pygments()
278 295 check_for_nose()
279 296 check_for_pexpect()
1 NO CONTENT: file was removed