Show More
@@ -0,0 +1,184 b'' | |||||
|
import os

c = get_config()

#-----------------------------------------------------------------------------
# Select which launchers to use
#-----------------------------------------------------------------------------

# These options control how the controller and the engines are started.
# The following launch methods are currently supported:
#   - plain local processes on localhost
#   - mpiexec/mpirun
#   - the Windows HPC Server 2008 scheduler
#   - PBS batch systems
#   - SSH (currently broken)

# The selected launchers can be configured in the sections below.

# Controller launcher.  Options are:
#   - LocalControllerLauncher
#   - MPIExecControllerLauncher
#   - PBSControllerLauncher
#   - WindowsHPCControllerLauncher
# c.Global.controller_launcher = 'IPython.kernel.launcher.LocalControllerLauncher'

# Engine set launcher.  Options are:
#   - LocalEngineSetLauncher
#   - MPIExecEngineSetLauncher
#   - PBSEngineSetLauncher
#   - WindowsHPCEngineSetLauncher
# c.Global.engine_launcher = 'IPython.kernel.launcher.LocalEngineSetLauncher'

#-----------------------------------------------------------------------------
# Global configuration
#-----------------------------------------------------------------------------

# Default number of engines to start.  Overridden by the -n command line
# option: "ipcluster start -n 4".
# c.Global.n = 2

# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
# c.Global.log_to_file = False

# Remove old logs from cluster_dir/log before starting.
# c.Global.clean_logs = True

# Working directory for the process.  The application will use os.chdir
# to change to this directory before starting.
# c.Global.work_dir = os.getcwd()

#-----------------------------------------------------------------------------
# Local process launchers
#-----------------------------------------------------------------------------

# Command line arguments used to call the controller.
# c.LocalControllerLauncher.controller_args = \
#     ['--log-to-file','--log-level', '40']

# Working directory for the engines.
# c.LocalEngineSetLauncher.work_dir = u''

# Command line arguments passed to the engines.
# c.LocalEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']

#-----------------------------------------------------------------------------
# MPIExec launchers
#-----------------------------------------------------------------------------

# The mpiexec/mpirun command used to start the controller.
# c.MPIExecControllerLauncher.mpi_cmd = ['mpiexec']

# Additional arguments to pass to the mpiexec command itself.
# c.MPIExecControllerLauncher.mpi_args = []

# Command line arguments used to call the controller.
# c.MPIExecControllerLauncher.controller_args = \
#     ['--log-to-file','--log-level', '40']

# The mpiexec/mpirun command used to start the engines.
# c.MPIExecEngineSetLauncher.mpi_cmd = ['mpiexec']

# Additional arguments to pass to the mpiexec command itself.
# c.MPIExecEngineSetLauncher.mpi_args = []

# Command line arguments passed to the engines.
# c.MPIExecEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']

# Default number of engines to start if not given elsewhere.
# c.MPIExecEngineSetLauncher.n = 1

#-----------------------------------------------------------------------------
# SSH launchers
#-----------------------------------------------------------------------------

# TODO: SSH launcher configuration is not implemented yet.

#-----------------------------------------------------------------------------
# Unix batch (PBS) scheduler launchers
#-----------------------------------------------------------------------------

# Program used to submit a PBS job.
# c.PBSControllerLauncher.submit_command = 'qsub'

# Program used to delete a PBS job.
# c.PBSControllerLauncher.delete_command = 'qdel'

# Regular expression applied to the output of qsub to find the job id.
# c.PBSControllerLauncher.job_id_regexp = r'\d+'

# Batch submission script used to start the controller.  Set up environment
# variables, etc., here.  The string is interpolated with the Itpl module
# from IPython.external: ${n} is the number of engines and ${cluster_dir}
# is the cluster directory.
# c.PBSControllerLauncher.batch_template = """"""

# Name of the instantiated batch script that will actually be submitted.
# It is written to the cluster directory.
# c.PBSControllerLauncher.batch_file_name = u'pbs_batch_script_controller'

# Program used to submit a PBS job.
# c.PBSEngineSetLauncher.submit_command = 'qsub'

# Program used to delete a PBS job.
# c.PBSEngineSetLauncher.delete_command = 'qdel'

# Regular expression applied to the output of qsub to find the job id.
# c.PBSEngineSetLauncher.job_id_regexp = r'\d+'

# Batch submission script used to start the engines.  The same interpolation
# rules apply as for the controller batch template above.
# c.PBSEngineSetLauncher.batch_template = """"""

# Name of the instantiated batch script that will actually be submitted.
# It is written to the cluster directory.
# c.PBSEngineSetLauncher.batch_file_name = u'pbs_batch_script_engines'

#-----------------------------------------------------------------------------
# Windows HPC Server 2008 launcher configuration
#-----------------------------------------------------------------------------

# c.IPControllerJob.job_name = 'IPController'
# c.IPControllerJob.is_exclusive = False
# c.IPControllerJob.username = r'USERDOMAIN\USERNAME'
# c.IPControllerJob.priority = 'Highest'
# c.IPControllerJob.requested_nodes = ''
# c.IPControllerJob.project = 'MyProject'

# c.IPControllerTask.task_name = 'IPController'
# c.IPControllerTask.controller_cmd = [u'ipcontroller.exe']
# c.IPControllerTask.controller_args = ['--log-to-file', '--log-level', '40']
# c.IPControllerTask.environment_variables = {}

# c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
# c.WindowsHPCControllerLauncher.job_file_name = u'ipcontroller_job.xml'


# c.IPEngineSetJob.job_name = 'IPEngineSet'
# c.IPEngineSetJob.is_exclusive = False
# c.IPEngineSetJob.username = r'USERDOMAIN\USERNAME'
# c.IPEngineSetJob.priority = 'Highest'
# c.IPEngineSetJob.requested_nodes = ''
# c.IPEngineSetJob.project = 'MyProject'

# c.IPEngineTask.task_name = 'IPEngine'
# c.IPEngineTask.engine_cmd = [u'ipengine.exe']
# c.IPEngineTask.engine_args = ['--log-to-file', '--log-level', '40']
# c.IPEngineTask.environment_variables = {}

# c.WindowsHPCEngineSetLauncher.scheduler = 'HEADNODE'
# c.WindowsHPCEngineSetLauncher.job_file_name = u'ipengineset_job.xml'
@@ -0,0 +1,136 b'' | |||||
|
from IPython.config.loader import Config

c = get_config()

#-----------------------------------------------------------------------------
# Global configuration
#-----------------------------------------------------------------------------

# Start up messages are logged to stdout using the logging module.  They
# all happen before the twisted reactor is started and are useful for
# debugging.  Levels are 10=DEBUG, 20=INFO, 30=WARN, 40=CRITICAL; smaller
# is more verbose.
# c.Global.log_level = 20

# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
# c.Global.log_to_file = False

# Remove old logs from cluster_dir/log before starting.
# c.Global.clean_logs = True

# Python statements run before starting the controller.  Provided because
# occasionally certain things need to be imported in the controller for
# pickling to work.
# c.Global.import_statements = ['import math']

# Reuse the controller's FURL files.  If False, FURL files are regenerated
# each time the controller is run.  If True they will be reused, *but* you
# must then also set the network ports by hand.  If set, this overrides
# the values set for the client and engine connections below.
# c.Global.reuse_furls = True

# Enable SSL encryption on all connections to the controller.  If set,
# this overrides the values set for the client and engine connections
# below.
# c.Global.secure = True

# Working directory for the process.  The application will use os.chdir
# to change to this directory before starting.
# c.Global.work_dir = os.getcwd()

#-----------------------------------------------------------------------------
# Configure the client services
#-----------------------------------------------------------------------------

# Network interface the controller listens on for client connections.
# An IP address or hostname of the controller's host; the empty string
# means listen on all interfaces.
# c.FCClientServiceFactory.ip = ''

# TCP/IP port the controller listens on for client connections.  If 0, a
# random port is used.  Any firewall on the controller's host must allow
# incoming traffic on this port.
# c.FCClientServiceFactory.port = 0

# The client learns how to connect to the controller from the location
# field embedded in the FURL.  If this field is empty, all network
# interfaces the controller listens on are listed.  To have the client
# connect on one particular interface, list it here.
# c.FCClientServiceFactory.location = ''

# Use SSL encryption for the client connection.
# c.FCClientServiceFactory.secure = True

# Reuse the client FURL each time the controller is started.  If set, you
# must also pick a specific network port above (FCClientServiceFactory.port).
# c.FCClientServiceFactory.reuse_furls = False

#-----------------------------------------------------------------------------
# Configure the engine services
#-----------------------------------------------------------------------------

# Network interface the controller listens on for engine connections.
# An IP address or hostname of the controller's host; the empty string
# means listen on all interfaces.
# c.FCEngineServiceFactory.ip = ''

# TCP/IP port the controller listens on for engine connections.  If 0, a
# random port is used.  Any firewall on the controller's host must allow
# incoming traffic on this port.
# c.FCEngineServiceFactory.port = 0

# The engine learns how to connect to the controller from the location
# field embedded in the FURL.  If this field is empty, all network
# interfaces the controller listens on are listed.  To have the engine
# connect on one particular interface, list it here.
# c.FCEngineServiceFactory.location = ''

# Use SSL encryption for the engine connection.
# c.FCEngineServiceFactory.secure = True

# Reuse the engine FURL each time the controller is started.  If set, you
# must also pick a specific network port above (FCEngineServiceFactory.port).
# c.FCEngineServiceFactory.reuse_furls = False

#-----------------------------------------------------------------------------
# Developer level configuration attributes
#-----------------------------------------------------------------------------

# You shouldn't have to modify anything in this section.  These attributes
# are for developers who want to change the behavior of the controller at
# a fundamental level.

# c.FCClientServiceFactory.cert_file = u'ipcontroller-client.pem'

# default_client_interfaces = Config()
# default_client_interfaces.Task.interface_chain = [
#     'IPython.kernel.task.ITaskController',
#     'IPython.kernel.taskfc.IFCTaskController'
# ]
#
# default_client_interfaces.Task.furl_file = u'ipcontroller-tc.furl'
#
# default_client_interfaces.MultiEngine.interface_chain = [
#     'IPython.kernel.multiengine.IMultiEngine',
#     'IPython.kernel.multienginefc.IFCSynchronousMultiEngine'
# ]
#
# default_client_interfaces.MultiEngine.furl_file = u'ipcontroller-mec.furl'
#
# NOTE(review): the original comment assigned default_client_interfaces to
# c.FCEngineServiceFactory.interfaces, which looks like a copy-paste slip;
# the client interfaces belong on the client factory:
# c.FCClientServiceFactory.interfaces = default_client_interfaces

# c.FCEngineServiceFactory.cert_file = u'ipcontroller-engine.pem'

# default_engine_interfaces = Config()
# default_engine_interfaces.Default.interface_chain = [
#     'IPython.kernel.enginefc.IFCControllerBase'
# ]
#
# default_engine_interfaces.Default.furl_file = u'ipcontroller-engine.furl'
#
# c.FCEngineServiceFactory.interfaces = default_engine_interfaces
@@ -0,0 +1,90 b'' | |||||
|
c = get_config()

#-----------------------------------------------------------------------------
# Global configuration
#-----------------------------------------------------------------------------

# Start up messages are logged to stdout using the logging module.  They
# all happen before the twisted reactor is started and are useful for
# debugging.  Levels are 10=DEBUG, 20=INFO, 30=WARN, 40=CRITICAL; smaller
# is more verbose.
# c.Global.log_level = 20

# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
# c.Global.log_to_file = False

# Remove old logs from cluster_dir/log before starting.
# c.Global.clean_logs = True

# Strings executed in the user's namespace on the engine before it
# connects to the controller.
# c.Global.exec_lines = ['import numpy']

# The engine tries to connect to the controller multiple times, giving the
# controller time to start up and write its FURL file.  These parameters
# control the number of retries (connect_max_tries) and the initial delay
# (connect_delay) between attempts.  Each subsequent delay grows by a
# factor of 1.5 (delay[i] = 1.5*delay[i-1]).
# c.Global.connect_delay = 0.1
# c.Global.connect_max_tries = 15

# By default the engine looks for the controller's FURL file in its own
# cluster directory.  If the FURL file lives elsewhere, set this attribute
# to its full path.
# c.Global.furl_file = u''

# Working directory for the process.  The application will use os.chdir
# to change to this directory before starting.
# c.Global.work_dir = os.getcwd()

#-----------------------------------------------------------------------------
# MPI configuration
#-----------------------------------------------------------------------------

# Upon starting, the engine can be configured to call MPI_Init.  This
# section configures that.

# Select which MPI section to execute to set up MPI.  The value of this
# attribute must match the name of another attribute in the MPI config
# section (mpi4py, pytrilinos, etc.).  It can also be set with the --mpi
# command line option.
# c.MPI.use = ''

# Initialize MPI using mpi4py.  To use this, set c.MPI.use = 'mpi4py' or
# pass --mpi=mpi4py at the command line.
# c.MPI.mpi4py = """from mpi4py import MPI as mpi
# mpi.size = mpi.COMM_WORLD.Get_size()
# mpi.rank = mpi.COMM_WORLD.Get_rank()
# """

# Initialize MPI using pytrilinos.  To use this, set c.MPI.use =
# 'pytrilinos' or pass --mpi=pytrilinos at the command line.
# c.MPI.pytrilinos = """from PyTrilinos import Epetra
# class SimpleStruct:
# pass
# mpi = SimpleStruct()
# mpi.rank = 0
# mpi.size = 0
# """

#-----------------------------------------------------------------------------
# Developer level configuration attributes
#-----------------------------------------------------------------------------

# You shouldn't have to modify anything in this section.  These attributes
# are for developers who want to change the behavior of the engine at a
# fundamental level.

# c.Global.shell_class = 'IPython.kernel.core.interpreter.Interpreter'

# c.Global.furl_file_name = u'ipcontroller-engine.furl'
@@ -0,0 +1,24 b'' | |||||
|
c = get_config()

# load_subconfig can be used at any point in a config file to load another
# config file and merge it into the current one.
load_subconfig('ipython_config.py')

lines = """
from IPython.kernel.client import *
"""

# Container attributes must already exist before they can be appended to;
# simply assigning a new list would clobber any previously set values.
if hasattr(c.Global, 'exec_lines'):
    c.Global.exec_lines.append(lines)
else:
    c.Global.exec_lines = [lines]

# Load the parallelmagic extension to enable the %result, %px and %autopx
# magics.
if hasattr(c.Global, 'extensions'):
    c.Global.extensions.append('parallelmagic')
else:
    c.Global.extensions = ['parallelmagic']
@@ -0,0 +1,205 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | ||||
|
4 | """Magic command interface for interactive parallel work.""" | |||
|
5 | ||||
|
6 | #----------------------------------------------------------------------------- | |||
|
7 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
8 | # | |||
|
9 | # Distributed under the terms of the BSD License. The full license is in | |||
|
10 | # the file COPYING, distributed as part of this software. | |||
|
11 | #----------------------------------------------------------------------------- | |||
|
12 | ||||
|
13 | #----------------------------------------------------------------------------- | |||
|
14 | # Imports | |||
|
15 | #----------------------------------------------------------------------------- | |||
|
16 | ||||
|
17 | import new | |||
|
18 | ||||
|
19 | from IPython.core.component import Component | |||
|
20 | from IPython.utils.traitlets import Bool, Any | |||
|
21 | from IPython.utils.autoattr import auto_attr | |||
|
22 | ||||
|
23 | #----------------------------------------------------------------------------- | |||
|
24 | # Definitions of magic functions for use with IPython | |||
|
25 | #----------------------------------------------------------------------------- | |||
|
26 | ||||
|
27 | ||||
|
28 | NO_ACTIVE_MULTIENGINE_CLIENT = """ | |||
|
29 | Use activate() on a MultiEngineClient object to activate it for magics. | |||
|
30 | """ | |||
|
31 | ||||
|
32 | ||||
|
33 | class ParalleMagicComponent(Component): | |||
|
34 | """A component to manage the %result, %px and %autopx magics.""" | |||
|
35 | ||||
|
36 | active_multiengine_client = Any() | |||
|
37 | verbose = Bool(False, config=True) | |||
|
38 | ||||
|
39 | def __init__(self, parent, name=None, config=None): | |||
|
40 | super(ParalleMagicComponent, self).__init__(parent, name=name, config=config) | |||
|
41 | self._define_magics() | |||
|
42 | # A flag showing if autopx is activated or not | |||
|
43 | self.autopx = False | |||
|
44 | ||||
|
45 | # Access other components like this rather than by a regular attribute. | |||
|
46 | # This won't lookup the InteractiveShell object until it is used and | |||
|
47 | # then it is cached. This is both efficient and couples this class | |||
|
48 | # more loosely to InteractiveShell. | |||
|
49 | @auto_attr | |||
|
50 | def shell(self): | |||
|
51 | return Component.get_instances( | |||
|
52 | root=self.root, | |||
|
53 | klass='IPython.core.iplib.InteractiveShell')[0] | |||
|
54 | ||||
|
55 | def _define_magics(self): | |||
|
56 | """Define the magic functions.""" | |||
|
57 | self.shell.define_magic('result', self.magic_result) | |||
|
58 | self.shell.define_magic('px', self.magic_px) | |||
|
59 | self.shell.define_magic('autopx', self.magic_autopx) | |||
|
60 | ||||
|
61 | def magic_result(self, ipself, parameter_s=''): | |||
|
62 | """Print the result of command i on all engines.. | |||
|
63 | ||||
|
64 | To use this a :class:`MultiEngineClient` instance must be created | |||
|
65 | and then activated by calling its :meth:`activate` method. | |||
|
66 | ||||
|
67 | Then you can do the following:: | |||
|
68 | ||||
|
69 | In [23]: %result | |||
|
70 | Out[23]: | |||
|
71 | <Results List> | |||
|
72 | [0] In [6]: a = 10 | |||
|
73 | [1] In [6]: a = 10 | |||
|
74 | ||||
|
75 | In [22]: %result 6 | |||
|
76 | Out[22]: | |||
|
77 | <Results List> | |||
|
78 | [0] In [6]: a = 10 | |||
|
79 | [1] In [6]: a = 10 | |||
|
80 | """ | |||
|
81 | if self.active_multiengine_client is None: | |||
|
82 | print NO_ACTIVE_MULTIENGINE_CLIENT | |||
|
83 | return | |||
|
84 | ||||
|
85 | try: | |||
|
86 | index = int(parameter_s) | |||
|
87 | except: | |||
|
88 | index = None | |||
|
89 | result = self.active_multiengine_client.get_result(index) | |||
|
90 | return result | |||
|
91 | ||||
|
92 | def magic_px(self, ipself, parameter_s=''): | |||
|
93 | """Executes the given python command in parallel. | |||
|
94 | ||||
|
95 | To use this a :class:`MultiEngineClient` instance must be created | |||
|
96 | and then activated by calling its :meth:`activate` method. | |||
|
97 | ||||
|
98 | Then you can do the following:: | |||
|
99 | ||||
|
100 | In [24]: %px a = 5 | |||
|
101 | Parallel execution on engines: all | |||
|
102 | Out[24]: | |||
|
103 | <Results List> | |||
|
104 | [0] In [7]: a = 5 | |||
|
105 | [1] In [7]: a = 5 | |||
|
106 | """ | |||
|
107 | ||||
|
108 | if self.active_multiengine_client is None: | |||
|
109 | print NO_ACTIVE_MULTIENGINE_CLIENT | |||
|
110 | return | |||
|
111 | print "Parallel execution on engines: %s" % self.active_multiengine_client.targets | |||
|
112 | result = self.active_multiengine_client.execute(parameter_s) | |||
|
113 | return result | |||
|
114 | ||||
|
115 | def magic_autopx(self, ipself, parameter_s=''): | |||
|
116 | """Toggles auto parallel mode. | |||
|
117 | ||||
|
118 | To use this a :class:`MultiEngineClient` instance must be created | |||
|
119 | and then activated by calling its :meth:`activate` method. Once this | |||
|
120 | is called, all commands typed at the command line are send to | |||
|
121 | the engines to be executed in parallel. To control which engine | |||
|
122 | are used, set the ``targets`` attributed of the multiengine client | |||
|
123 | before entering ``%autopx`` mode. | |||
|
124 | ||||
|
125 | Then you can do the following:: | |||
|
126 | ||||
|
127 | In [25]: %autopx | |||
|
128 | %autopx to enabled | |||
|
129 | ||||
|
130 | In [26]: a = 10 | |||
|
131 | <Results List> | |||
|
132 | [0] In [8]: a = 10 | |||
|
133 | [1] In [8]: a = 10 | |||
|
134 | ||||
|
135 | ||||
|
136 | In [27]: %autopx | |||
|
137 | %autopx disabled | |||
|
138 | """ | |||
|
139 | if self.autopx: | |||
|
140 | self._disable_autopx() | |||
|
141 | else: | |||
|
142 | self._enable_autopx() | |||
|
143 | ||||
|
144 | def _enable_autopx(self): | |||
|
145 | """Enable %autopx mode by saving the original runsource and installing | |||
|
146 | pxrunsource. | |||
|
147 | """ | |||
|
148 | if self.active_multiengine_client is None: | |||
|
149 | print NO_ACTIVE_MULTIENGINE_CLIENT | |||
|
150 | return | |||
|
151 | ||||
|
152 | self._original_runsource = self.shell.runsource | |||
|
153 | self.shell.runsource = new.instancemethod( | |||
|
154 | self.pxrunsource, self.shell, self.shell.__class__ | |||
|
155 | ) | |||
|
156 | self.autopx = True | |||
|
157 | print "%autopx enabled" | |||
|
158 | ||||
|
159 | def _disable_autopx(self): | |||
|
160 | """Disable %autopx by restoring the original InteractiveShell.runsource.""" | |||
|
161 | if self.autopx: | |||
|
162 | self.shell.runsource = self._original_runsource | |||
|
163 | self.autopx = False | |||
|
164 | print "%autopx disabled" | |||
|
165 | ||||
|
166 | def pxrunsource(self, ipself, source, filename="<input>", symbol="single"): | |||
|
167 | """A parallel replacement for InteractiveShell.runsource.""" | |||
|
168 | ||||
|
169 | try: | |||
|
170 | code = ipself.compile(source, filename, symbol) | |||
|
171 | except (OverflowError, SyntaxError, ValueError): | |||
|
172 | # Case 1 | |||
|
173 | ipself.showsyntaxerror(filename) | |||
|
174 | return None | |||
|
175 | ||||
|
176 | if code is None: | |||
|
177 | # Case 2 | |||
|
178 | return True | |||
|
179 | ||||
|
180 | # Case 3 | |||
|
181 | # Because autopx is enabled, we now call executeAll or disable autopx if | |||
|
182 | # %autopx or autopx has been called | |||
|
183 | if 'get_ipython().magic("%autopx' in source or 'get_ipython().magic("autopx' in source: | |||
|
184 | self._disable_autopx() | |||
|
185 | return False | |||
|
186 | else: | |||
|
187 | try: | |||
|
188 | result = self.active_multiengine_client.execute(source) | |||
|
189 | except: | |||
|
190 | ipself.showtraceback() | |||
|
191 | else: | |||
|
192 | print result.__repr__() | |||
|
193 | return False | |||
|
194 | ||||
|
195 | ||||
|
# Module-level guard: ensures the component is created only once per process.
_loaded = False


def load_ipython_extension(ip):
    """Load the extension in IPython."""
    global _loaded
    if _loaded:
        return
    ParalleMagicComponent(ip, name='parallel_magic')
    _loaded = True
|
205 |
@@ -0,0 +1,475 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | The IPython cluster directory | |||
|
5 | """ | |||
|
6 | ||||
|
7 | #----------------------------------------------------------------------------- | |||
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
9 | # | |||
|
10 | # Distributed under the terms of the BSD License. The full license is in | |||
|
11 | # the file COPYING, distributed as part of this software. | |||
|
12 | #----------------------------------------------------------------------------- | |||
|
13 | ||||
|
14 | #----------------------------------------------------------------------------- | |||
|
15 | # Imports | |||
|
16 | #----------------------------------------------------------------------------- | |||
|
17 | ||||
|
18 | from __future__ import with_statement | |||
|
19 | ||||
|
20 | import os | |||
|
21 | import shutil | |||
|
22 | import sys | |||
|
23 | ||||
|
24 | from twisted.python import log | |||
|
25 | ||||
|
26 | from IPython.core import release | |||
|
27 | from IPython.config.loader import PyFileConfigLoader | |||
|
28 | from IPython.core.application import Application | |||
|
29 | from IPython.core.component import Component | |||
|
30 | from IPython.config.loader import ArgParseConfigLoader, NoConfigDefault | |||
|
31 | from IPython.utils.traitlets import Unicode, Bool | |||
|
32 | from IPython.utils import genutils | |||
|
33 | ||||
|
34 | #----------------------------------------------------------------------------- | |||
|
35 | # Code | |||
|
36 | #----------------------------------------------------------------------------- | |||
|
37 | ||||
|
38 | ||||
|
39 | class ClusterDirError(Exception): | |||
|
40 | pass | |||
|
41 | ||||
|
42 | ||||
|
43 | class PIDFileError(Exception): | |||
|
44 | pass | |||
|
45 | ||||
|
46 | ||||
|
47 | class ClusterDir(Component): | |||
|
48 | """An object to manage the cluster directory and its resources. | |||
|
49 | ||||
|
50 | The cluster directory is used by :command:`ipcontroller`, | |||
|
51 | :command:`ipengine` and :command:`ipcluster` to manage the | |||
|
52 | configuration, logging and security of these applications. | |||
|
53 | ||||
|
54 | This object knows how to find, create and manage these directories. This | |||
|
55 | should be used by any code that wants to handle cluster directories. | |||
|
56 | """ | |||
|
57 | ||||
|
58 | security_dir_name = Unicode('security') | |||
|
59 | log_dir_name = Unicode('log') | |||
|
60 | pid_dir_name = Unicode('pid') | |||
|
61 | security_dir = Unicode(u'') | |||
|
62 | log_dir = Unicode(u'') | |||
|
63 | pid_dir = Unicode(u'') | |||
|
64 | location = Unicode(u'') | |||
|
65 | ||||
|
66 | def __init__(self, location): | |||
|
67 | super(ClusterDir, self).__init__(None) | |||
|
68 | self.location = location | |||
|
69 | ||||
|
70 | def _location_changed(self, name, old, new): | |||
|
71 | if not os.path.isdir(new): | |||
|
72 | os.makedirs(new) | |||
|
73 | self.security_dir = os.path.join(new, self.security_dir_name) | |||
|
74 | self.log_dir = os.path.join(new, self.log_dir_name) | |||
|
75 | self.pid_dir = os.path.join(new, self.pid_dir_name) | |||
|
76 | self.check_dirs() | |||
|
77 | ||||
|
78 | def _log_dir_changed(self, name, old, new): | |||
|
79 | self.check_log_dir() | |||
|
80 | ||||
|
81 | def check_log_dir(self): | |||
|
82 | if not os.path.isdir(self.log_dir): | |||
|
83 | os.mkdir(self.log_dir) | |||
|
84 | ||||
|
85 | def _security_dir_changed(self, name, old, new): | |||
|
86 | self.check_security_dir() | |||
|
87 | ||||
|
88 | def check_security_dir(self): | |||
|
89 | if not os.path.isdir(self.security_dir): | |||
|
90 | os.mkdir(self.security_dir, 0700) | |||
|
91 | os.chmod(self.security_dir, 0700) | |||
|
92 | ||||
|
93 | def _pid_dir_changed(self, name, old, new): | |||
|
94 | self.check_pid_dir() | |||
|
95 | ||||
|
96 | def check_pid_dir(self): | |||
|
97 | if not os.path.isdir(self.pid_dir): | |||
|
98 | os.mkdir(self.pid_dir, 0700) | |||
|
99 | os.chmod(self.pid_dir, 0700) | |||
|
100 | ||||
|
101 | def check_dirs(self): | |||
|
102 | self.check_security_dir() | |||
|
103 | self.check_log_dir() | |||
|
104 | self.check_pid_dir() | |||
|
105 | ||||
|
106 | def load_config_file(self, filename): | |||
|
107 | """Load a config file from the top level of the cluster dir. | |||
|
108 | ||||
|
109 | Parameters | |||
|
110 | ---------- | |||
|
111 | filename : unicode or str | |||
|
112 | The filename only of the config file that must be located in | |||
|
113 | the top-level of the cluster directory. | |||
|
114 | """ | |||
|
115 | loader = PyFileConfigLoader(filename, self.location) | |||
|
116 | return loader.load_config() | |||
|
117 | ||||
|
118 | def copy_config_file(self, config_file, path=None, overwrite=False): | |||
|
119 | """Copy a default config file into the active cluster directory. | |||
|
120 | ||||
|
121 | Default configuration files are kept in :mod:`IPython.config.default`. | |||
|
122 | This function moves these from that location to the working cluster | |||
|
123 | directory. | |||
|
124 | """ | |||
|
125 | if path is None: | |||
|
126 | import IPython.config.default | |||
|
127 | path = IPython.config.default.__file__.split(os.path.sep)[:-1] | |||
|
128 | path = os.path.sep.join(path) | |||
|
129 | src = os.path.join(path, config_file) | |||
|
130 | dst = os.path.join(self.location, config_file) | |||
|
131 | if not os.path.isfile(dst) or overwrite: | |||
|
132 | shutil.copy(src, dst) | |||
|
133 | ||||
|
134 | def copy_all_config_files(self, path=None, overwrite=False): | |||
|
135 | """Copy all config files into the active cluster directory.""" | |||
|
136 | for f in [u'ipcontroller_config.py', u'ipengine_config.py', | |||
|
137 | u'ipcluster_config.py']: | |||
|
138 | self.copy_config_file(f, path=path, overwrite=overwrite) | |||
|
139 | ||||
|
140 | @classmethod | |||
|
141 | def create_cluster_dir(csl, cluster_dir): | |||
|
142 | """Create a new cluster directory given a full path. | |||
|
143 | ||||
|
144 | Parameters | |||
|
145 | ---------- | |||
|
146 | cluster_dir : str | |||
|
147 | The full path to the cluster directory. If it does exist, it will | |||
|
148 | be used. If not, it will be created. | |||
|
149 | """ | |||
|
150 | return ClusterDir(cluster_dir) | |||
|
151 | ||||
|
152 | @classmethod | |||
|
153 | def create_cluster_dir_by_profile(cls, path, profile=u'default'): | |||
|
154 | """Create a cluster dir by profile name and path. | |||
|
155 | ||||
|
156 | Parameters | |||
|
157 | ---------- | |||
|
158 | path : str | |||
|
159 | The path (directory) to put the cluster directory in. | |||
|
160 | profile : str | |||
|
161 | The name of the profile. The name of the cluster directory will | |||
|
162 | be "cluster_<profile>". | |||
|
163 | """ | |||
|
164 | if not os.path.isdir(path): | |||
|
165 | raise ClusterDirError('Directory not found: %s' % path) | |||
|
166 | cluster_dir = os.path.join(path, u'cluster_' + profile) | |||
|
167 | return ClusterDir(cluster_dir) | |||
|
168 | ||||
|
169 | @classmethod | |||
|
170 | def find_cluster_dir_by_profile(cls, ipython_dir, profile=u'default'): | |||
|
171 | """Find an existing cluster dir by profile name, return its ClusterDir. | |||
|
172 | ||||
|
173 | This searches through a sequence of paths for a cluster dir. If it | |||
|
174 | is not found, a :class:`ClusterDirError` exception will be raised. | |||
|
175 | ||||
|
176 | The search path algorithm is: | |||
|
177 | 1. ``os.getcwd()`` | |||
|
178 | 2. ``ipython_dir`` | |||
|
179 | 3. The directories found in the ":" separated | |||
|
180 | :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
181 | ||||
|
182 | Parameters | |||
|
183 | ---------- | |||
|
184 | ipython_dir : unicode or str | |||
|
185 | The IPython directory to use. | |||
|
186 | profile : unicode or str | |||
|
187 | The name of the profile. The name of the cluster directory | |||
|
188 | will be "cluster_<profile>". | |||
|
189 | """ | |||
|
190 | dirname = u'cluster_' + profile | |||
|
191 | cluster_dir_paths = os.environ.get('IPCLUSTER_DIR_PATH','') | |||
|
192 | if cluster_dir_paths: | |||
|
193 | cluster_dir_paths = cluster_dir_paths.split(':') | |||
|
194 | else: | |||
|
195 | cluster_dir_paths = [] | |||
|
196 | paths = [os.getcwd(), ipython_dir] + cluster_dir_paths | |||
|
197 | for p in paths: | |||
|
198 | cluster_dir = os.path.join(p, dirname) | |||
|
199 | if os.path.isdir(cluster_dir): | |||
|
200 | return ClusterDir(cluster_dir) | |||
|
201 | else: | |||
|
202 | raise ClusterDirError('Cluster directory not found in paths: %s' % dirname) | |||
|
203 | ||||
|
204 | @classmethod | |||
|
205 | def find_cluster_dir(cls, cluster_dir): | |||
|
206 | """Find/create a cluster dir and return its ClusterDir. | |||
|
207 | ||||
|
208 | This will create the cluster directory if it doesn't exist. | |||
|
209 | ||||
|
210 | Parameters | |||
|
211 | ---------- | |||
|
212 | cluster_dir : unicode or str | |||
|
213 | The path of the cluster directory. This is expanded using | |||
|
214 | :func:`IPython.utils.genutils.expand_path`. | |||
|
215 | """ | |||
|
216 | cluster_dir = genutils.expand_path(cluster_dir) | |||
|
217 | if not os.path.isdir(cluster_dir): | |||
|
218 | raise ClusterDirError('Cluster directory not found: %s' % cluster_dir) | |||
|
219 | return ClusterDir(cluster_dir) | |||
|
220 | ||||
|
221 | ||||
|
222 | class AppWithClusterDirArgParseConfigLoader(ArgParseConfigLoader): | |||
|
223 | """Default command line options for IPython cluster applications.""" | |||
|
224 | ||||
|
225 | def _add_other_arguments(self): | |||
|
226 | self.parser.add_argument('--ipython-dir', | |||
|
227 | dest='Global.ipython_dir',type=unicode, | |||
|
228 | help='Set to override default location of Global.ipython_dir.', | |||
|
229 | default=NoConfigDefault, | |||
|
230 | metavar='Global.ipython_dir' | |||
|
231 | ) | |||
|
232 | self.parser.add_argument('-p', '--profile', | |||
|
233 | dest='Global.profile',type=unicode, | |||
|
234 | help='The string name of the profile to be used. This determines ' | |||
|
235 | 'the name of the cluster dir as: cluster_<profile>. The default profile ' | |||
|
236 | 'is named "default". The cluster directory is resolved this way ' | |||
|
237 | 'if the --cluster-dir option is not used.', | |||
|
238 | default=NoConfigDefault, | |||
|
239 | metavar='Global.profile' | |||
|
240 | ) | |||
|
241 | self.parser.add_argument('--log-level', | |||
|
242 | dest="Global.log_level",type=int, | |||
|
243 | help='Set the log level (0,10,20,30,40,50). Default is 30.', | |||
|
244 | default=NoConfigDefault, | |||
|
245 | metavar="Global.log_level" | |||
|
246 | ) | |||
|
247 | self.parser.add_argument('--cluster-dir', | |||
|
248 | dest='Global.cluster_dir',type=unicode, | |||
|
249 | help='Set the cluster dir. This overrides the logic used by the ' | |||
|
250 | '--profile option.', | |||
|
251 | default=NoConfigDefault, | |||
|
252 | metavar='Global.cluster_dir' | |||
|
253 | ), | |||
|
254 | self.parser.add_argument('--work-dir', | |||
|
255 | dest='Global.work_dir',type=unicode, | |||
|
256 | help='Set the working dir for the process.', | |||
|
257 | default=NoConfigDefault, | |||
|
258 | metavar='Global.work_dir' | |||
|
259 | ) | |||
|
260 | self.parser.add_argument('--clean-logs', | |||
|
261 | dest='Global.clean_logs', action='store_true', | |||
|
262 | help='Delete old log files before starting.', | |||
|
263 | default=NoConfigDefault | |||
|
264 | ) | |||
|
265 | self.parser.add_argument('--no-clean-logs', | |||
|
266 | dest='Global.clean_logs', action='store_false', | |||
|
267 | help="Don't delete old log files before starting.", | |||
|
268 | default=NoConfigDefault | |||
|
269 | ) | |||
|
270 | ||||
|
271 | class ApplicationWithClusterDir(Application): | |||
|
272 | """An application that puts everything into a cluster directory. | |||
|
273 | ||||
|
274 | Instead of looking for things in the ipython_dir, this type of application | |||
|
275 | will use its own private directory called the "cluster directory" | |||
|
276 | for things like config files, log files, etc. | |||
|
277 | ||||
|
278 | The cluster directory is resolved as follows: | |||
|
279 | ||||
|
280 | * If the ``--cluster-dir`` option is given, it is used. | |||
|
281 | * If ``--cluster-dir`` is not given, the application directory is | |||
|
282 | resolved using the profile name as ``cluster_<profile>``. The search | |||
|
283 | path for this directory is then i) cwd if it is found there | |||
|
284 | and ii) in ipython_dir otherwise. | |||
|
285 | ||||
|
286 | The config file for the application is to be put in the cluster | |||
|
287 | dir and named the value of the ``config_file_name`` class attribute. | |||
|
288 | """ | |||
|
289 | ||||
|
290 | auto_create_cluster_dir = True | |||
|
291 | ||||
|
292 | def create_default_config(self): | |||
|
293 | super(ApplicationWithClusterDir, self).create_default_config() | |||
|
294 | self.default_config.Global.profile = u'default' | |||
|
295 | self.default_config.Global.cluster_dir = u'' | |||
|
296 | self.default_config.Global.work_dir = os.getcwd() | |||
|
297 | self.default_config.Global.log_to_file = False | |||
|
298 | self.default_config.Global.clean_logs = False | |||
|
299 | ||||
|
300 | def create_command_line_config(self): | |||
|
301 | """Create and return a command line config loader.""" | |||
|
302 | return AppWithClusterDirArgParseConfigLoader( | |||
|
303 | description=self.description, | |||
|
304 | version=release.version | |||
|
305 | ) | |||
|
306 | ||||
|
307 | def find_resources(self): | |||
|
308 | """This resolves the cluster directory. | |||
|
309 | ||||
|
310 | This tries to find the cluster directory and if successful, it will | |||
|
311 | have done: | |||
|
312 | * Sets ``self.cluster_dir_obj`` to the :class:`ClusterDir` object for | |||
|
313 | the application. | |||
|
314 | * Sets ``self.cluster_dir`` attribute of the application and config | |||
|
315 | objects. | |||
|
316 | ||||
|
317 | The algorithm used for this is as follows: | |||
|
318 | 1. Try ``Global.cluster_dir``. | |||
|
319 | 2. Try using ``Global.profile``. | |||
|
320 | 3. If both of these fail and ``self.auto_create_cluster_dir`` is | |||
|
321 | ``True``, then create the new cluster dir in the IPython directory. | |||
|
322 | 4. If all fails, then raise :class:`ClusterDirError`. | |||
|
323 | """ | |||
|
324 | ||||
|
325 | try: | |||
|
326 | cluster_dir = self.command_line_config.Global.cluster_dir | |||
|
327 | except AttributeError: | |||
|
328 | cluster_dir = self.default_config.Global.cluster_dir | |||
|
329 | cluster_dir = genutils.expand_path(cluster_dir) | |||
|
330 | try: | |||
|
331 | self.cluster_dir_obj = ClusterDir.find_cluster_dir(cluster_dir) | |||
|
332 | except ClusterDirError: | |||
|
333 | pass | |||
|
334 | else: | |||
|
335 | self.log.info('Using existing cluster dir: %s' % \ | |||
|
336 | self.cluster_dir_obj.location | |||
|
337 | ) | |||
|
338 | self.finish_cluster_dir() | |||
|
339 | return | |||
|
340 | ||||
|
341 | try: | |||
|
342 | self.profile = self.command_line_config.Global.profile | |||
|
343 | except AttributeError: | |||
|
344 | self.profile = self.default_config.Global.profile | |||
|
345 | try: | |||
|
346 | self.cluster_dir_obj = ClusterDir.find_cluster_dir_by_profile( | |||
|
347 | self.ipython_dir, self.profile) | |||
|
348 | except ClusterDirError: | |||
|
349 | pass | |||
|
350 | else: | |||
|
351 | self.log.info('Using existing cluster dir: %s' % \ | |||
|
352 | self.cluster_dir_obj.location | |||
|
353 | ) | |||
|
354 | self.finish_cluster_dir() | |||
|
355 | return | |||
|
356 | ||||
|
357 | if self.auto_create_cluster_dir: | |||
|
358 | self.cluster_dir_obj = ClusterDir.create_cluster_dir_by_profile( | |||
|
359 | self.ipython_dir, self.profile | |||
|
360 | ) | |||
|
361 | self.log.info('Creating new cluster dir: %s' % \ | |||
|
362 | self.cluster_dir_obj.location | |||
|
363 | ) | |||
|
364 | self.finish_cluster_dir() | |||
|
365 | else: | |||
|
366 | raise ClusterDirError('Could not find a valid cluster directory.') | |||
|
367 | ||||
|
368 | def finish_cluster_dir(self): | |||
|
369 | # Set the cluster directory | |||
|
370 | self.cluster_dir = self.cluster_dir_obj.location | |||
|
371 | ||||
|
372 | # These have to be set because they could be different from the one | |||
|
373 | # that we just computed. Because command line has the highest | |||
|
374 | # priority, this will always end up in the master_config. | |||
|
375 | self.default_config.Global.cluster_dir = self.cluster_dir | |||
|
376 | self.command_line_config.Global.cluster_dir = self.cluster_dir | |||
|
377 | ||||
|
378 | # Set the search path to the cluster directory | |||
|
379 | self.config_file_paths = (self.cluster_dir,) | |||
|
380 | ||||
|
381 | def find_config_file_name(self): | |||
|
382 | """Find the config file name for this application.""" | |||
|
383 | # For this type of Application it should be set as a class attribute. | |||
|
384 | if not hasattr(self, 'config_file_name'): | |||
|
385 | self.log.critical("No config filename found") | |||
|
386 | ||||
|
387 | def find_config_file_paths(self): | |||
|
388 | # Set the search path to the cluster directory | |||
|
389 | self.config_file_paths = (self.cluster_dir,) | |||
|
390 | ||||
|
391 | def pre_construct(self): | |||
|
392 | # The log and security dirs were set earlier, but here we put them | |||
|
393 | # into the config and log them. | |||
|
394 | config = self.master_config | |||
|
395 | sdir = self.cluster_dir_obj.security_dir | |||
|
396 | self.security_dir = config.Global.security_dir = sdir | |||
|
397 | ldir = self.cluster_dir_obj.log_dir | |||
|
398 | self.log_dir = config.Global.log_dir = ldir | |||
|
399 | pdir = self.cluster_dir_obj.pid_dir | |||
|
400 | self.pid_dir = config.Global.pid_dir = pdir | |||
|
401 | self.log.info("Cluster directory set to: %s" % self.cluster_dir) | |||
|
402 | config.Global.work_dir = unicode(genutils.expand_path(config.Global.work_dir)) | |||
|
403 | # Change to the working directory. We do this just before construct | |||
|
404 | # is called so all the components there have the right working dir. | |||
|
405 | self.to_work_dir() | |||
|
406 | ||||
|
407 | def to_work_dir(self): | |||
|
408 | wd = self.master_config.Global.work_dir | |||
|
409 | if unicode(wd) != unicode(os.getcwd()): | |||
|
410 | os.chdir(wd) | |||
|
411 | self.log.info("Changing to working dir: %s" % wd) | |||
|
412 | ||||
|
413 | def start_logging(self): | |||
|
414 | # Remove old log files | |||
|
415 | if self.master_config.Global.clean_logs: | |||
|
416 | log_dir = self.master_config.Global.log_dir | |||
|
417 | for f in os.listdir(log_dir): | |||
|
418 | if f.startswith(self.name + u'-') and f.endswith('.log'): | |||
|
419 | os.remove(os.path.join(log_dir, f)) | |||
|
420 | # Start logging to the new log file | |||
|
421 | if self.master_config.Global.log_to_file: | |||
|
422 | log_filename = self.name + u'-' + str(os.getpid()) + u'.log' | |||
|
423 | logfile = os.path.join(self.log_dir, log_filename) | |||
|
424 | open_log_file = open(logfile, 'w') | |||
|
425 | else: | |||
|
426 | open_log_file = sys.stdout | |||
|
427 | log.startLogging(open_log_file) | |||
|
428 | ||||
|
429 | def write_pid_file(self, overwrite=False): | |||
|
430 | """Create a .pid file in the pid_dir with my pid. | |||
|
431 | ||||
|
432 | This must be called after pre_construct, which sets `self.pid_dir`. | |||
|
433 | This raises :exc:`PIDFileError` if the pid file exists already. | |||
|
434 | """ | |||
|
435 | pid_file = os.path.join(self.pid_dir, self.name + u'.pid') | |||
|
436 | if os.path.isfile(pid_file): | |||
|
437 | pid = self.get_pid_from_file() | |||
|
438 | if not overwrite: | |||
|
439 | raise PIDFileError( | |||
|
440 | 'The pid file [%s] already exists. \nThis could mean that this ' | |||
|
441 | 'server is already running with [pid=%s].' % (pid_file, pid) | |||
|
442 | ) | |||
|
443 | with open(pid_file, 'w') as f: | |||
|
444 | self.log.info("Creating pid file: %s" % pid_file) | |||
|
445 | f.write(repr(os.getpid())+'\n') | |||
|
446 | ||||
|
447 | def remove_pid_file(self): | |||
|
448 | """Remove the pid file. | |||
|
449 | ||||
|
450 | This should be called at shutdown by registering a callback with | |||
|
451 | :func:`reactor.addSystemEventTrigger`. This needs to return | |||
|
452 | ``None``. | |||
|
453 | """ | |||
|
454 | pid_file = os.path.join(self.pid_dir, self.name + u'.pid') | |||
|
455 | if os.path.isfile(pid_file): | |||
|
456 | try: | |||
|
457 | self.log.info("Removing pid file: %s" % pid_file) | |||
|
458 | os.remove(pid_file) | |||
|
459 | except: | |||
|
460 | self.log.warn("Error removing the pid file: %s" % pid_file) | |||
|
461 | ||||
|
462 | def get_pid_from_file(self): | |||
|
463 | """Get the pid from the pid file. | |||
|
464 | ||||
|
465 | If the pid file doesn't exist a :exc:`PIDFileError` is raised. | |||
|
466 | """ | |||
|
467 | pid_file = os.path.join(self.pid_dir, self.name + u'.pid') | |||
|
468 | if os.path.isfile(pid_file): | |||
|
469 | with open(pid_file, 'r') as f: | |||
|
470 | pid = int(f.read().strip()) | |||
|
471 | return pid | |||
|
472 | else: | |||
|
473 | raise PIDFileError('pid file not found: %s' % pid_file) | |||
|
474 | ||||
|
475 |
@@ -0,0 +1,79 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | A class for creating a Twisted service that is configured using IPython's | |||
|
5 | configuration system. | |||
|
6 | """ | |||
|
7 | ||||
|
8 | #----------------------------------------------------------------------------- | |||
|
9 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
10 | # | |||
|
11 | # Distributed under the terms of the BSD License. The full license is in | |||
|
12 | # the file COPYING, distributed as part of this software. | |||
|
13 | #----------------------------------------------------------------------------- | |||
|
14 | ||||
|
15 | #----------------------------------------------------------------------------- | |||
|
16 | # Imports | |||
|
17 | #----------------------------------------------------------------------------- | |||
|
18 | ||||
|
19 | import zope.interface as zi | |||
|
20 | ||||
|
21 | from IPython.core.component import Component | |||
|
22 | ||||
|
23 | #----------------------------------------------------------------------------- | |||
|
24 | # Code | |||
|
25 | #----------------------------------------------------------------------------- | |||
|
26 | ||||
|
27 | ||||
|
28 | class IConfiguredObjectFactory(zi.Interface): | |||
|
29 | """I am a component that creates a configured object. | |||
|
30 | ||||
|
31 | This class is useful if you want to configure a class that is not a | |||
|
32 | subclass of :class:`IPython.core.component.Component`. | |||
|
33 | """ | |||
|
34 | ||||
|
35 | def __init__(config): | |||
|
36 | """Get ready to configure the object using config.""" | |||
|
37 | ||||
|
38 | def create(): | |||
|
39 | """Return an instance of the configured object.""" | |||
|
40 | ||||
|
41 | ||||
|
42 | class ConfiguredObjectFactory(Component): | |||
|
43 | ||||
|
44 | zi.implements(IConfiguredObjectFactory) | |||
|
45 | ||||
|
46 | def __init__(self, config): | |||
|
47 | super(ConfiguredObjectFactory, self).__init__(None, config=config) | |||
|
48 | ||||
|
49 | def create(self): | |||
|
50 | raise NotImplementedError('create must be implemented in a subclass') | |||
|
51 | ||||
|
52 | ||||
|
53 | class IAdaptedConfiguredObjectFactory(zi.Interface): | |||
|
54 | """I am a component that adapts and configures an object. | |||
|
55 | ||||
|
56 | This class is useful if you have to adapt an instance and configure it. | |||
|
57 | """ | |||
|
58 | ||||
|
59 | def __init__(config, adaptee=None): | |||
|
60 | """Get ready to adapt adaptee and then configure it using config.""" | |||
|
61 | ||||
|
62 | def create(): | |||
|
63 | """Return an instance of the adapted and configured object.""" | |||
|
64 | ||||
|
65 | ||||
|
66 | class AdaptedConfiguredObjectFactory(Component): | |||
|
67 | ||||
|
68 | # zi.implements(IAdaptedConfiguredObjectFactory) | |||
|
69 | ||||
|
70 | def __init__(self, config, adaptee): | |||
|
71 | ||||
|
72 | # print "config pre:", config | |||
|
73 | super(AdaptedConfiguredObjectFactory, self).__init__(None, config=config) | |||
|
74 | ||||
|
75 | # print "config post:", config | |||
|
76 | self.adaptee = adaptee | |||
|
77 | ||||
|
78 | def create(self): | |||
|
79 | raise NotImplementedError('create must be implemented in a subclass') No newline at end of file |
@@ -0,0 +1,471 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | The ipcluster application. | |||
|
5 | """ | |||
|
6 | ||||
|
7 | #----------------------------------------------------------------------------- | |||
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
9 | # | |||
|
10 | # Distributed under the terms of the BSD License. The full license is in | |||
|
11 | # the file COPYING, distributed as part of this software. | |||
|
12 | #----------------------------------------------------------------------------- | |||
|
13 | ||||
|
14 | #----------------------------------------------------------------------------- | |||
|
15 | # Imports | |||
|
16 | #----------------------------------------------------------------------------- | |||
|
17 | ||||
|
18 | import logging | |||
|
19 | import os | |||
|
20 | import signal | |||
|
21 | import sys | |||
|
22 | ||||
|
23 | if os.name=='posix': | |||
|
24 | from twisted.scripts._twistd_unix import daemonize | |||
|
25 | ||||
|
26 | from IPython.core import release | |||
|
27 | from IPython.external import argparse | |||
|
28 | from IPython.config.loader import ArgParseConfigLoader, NoConfigDefault | |||
|
29 | from IPython.utils.importstring import import_item | |||
|
30 | ||||
|
31 | from IPython.kernel.clusterdir import ( | |||
|
32 | ApplicationWithClusterDir, ClusterDirError, PIDFileError | |||
|
33 | ) | |||
|
34 | ||||
|
35 | from twisted.internet import reactor, defer | |||
|
36 | from twisted.python import log, failure | |||
|
37 | ||||
|
38 | ||||
|
39 | #----------------------------------------------------------------------------- | |||
|
40 | # The ipcluster application | |||
|
41 | #----------------------------------------------------------------------------- | |||
|
42 | ||||
|
43 | ||||
|
44 | # Exit codes for ipcluster | |||
|
45 | ||||
|
46 | # This will be the exit code if the ipcluster appears to be running because | |||
|
47 | # a .pid file exists | |||
|
48 | ALREADY_STARTED = 10 | |||
|
49 | ||||
|
50 | # This will be the exit code if ipcluster stop is run, but there is no .pid | |||
|
51 | # file to be found. | |||
|
52 | ALREADY_STOPPED = 11 | |||
|
53 | ||||
|
54 | ||||
|
55 | class IPClusterCLLoader(ArgParseConfigLoader): | |||
|
56 | ||||
|
57 | def _add_arguments(self): | |||
|
58 | # This has all the common options that all subcommands use | |||
|
59 | parent_parser1 = argparse.ArgumentParser(add_help=False) | |||
|
60 | parent_parser1.add_argument('--ipython-dir', | |||
|
61 | dest='Global.ipython_dir',type=unicode, | |||
|
62 | help='Set to override default location of Global.ipython_dir.', | |||
|
63 | default=NoConfigDefault, | |||
|
64 | metavar='Global.ipython_dir') | |||
|
65 | parent_parser1.add_argument('--log-level', | |||
|
66 | dest="Global.log_level",type=int, | |||
|
67 | help='Set the log level (0,10,20,30,40,50). Default is 30.', | |||
|
68 | default=NoConfigDefault, | |||
|
69 | metavar='Global.log_level') | |||
|
70 | ||||
|
71 | # This has all the common options that other subcommands use | |||
|
72 | parent_parser2 = argparse.ArgumentParser(add_help=False) | |||
|
73 | parent_parser2.add_argument('-p','--profile', | |||
|
74 | dest='Global.profile',type=unicode, | |||
|
75 | help='The string name of the profile to be used. This determines ' | |||
|
76 | 'the name of the cluster dir as: cluster_<profile>. The default profile ' | |||
|
77 | 'is named "default". The cluster directory is resolved this way ' | |||
|
78 | 'if the --cluster-dir option is not used.', | |||
|
79 | default=NoConfigDefault, | |||
|
80 | metavar='Global.profile') | |||
|
81 | parent_parser2.add_argument('--cluster-dir', | |||
|
82 | dest='Global.cluster_dir',type=unicode, | |||
|
83 | help='Set the cluster dir. This overrides the logic used by the ' | |||
|
84 | '--profile option.', | |||
|
85 | default=NoConfigDefault, | |||
|
86 | metavar='Global.cluster_dir'), | |||
|
87 | parent_parser2.add_argument('--work-dir', | |||
|
88 | dest='Global.work_dir',type=unicode, | |||
|
89 | help='Set the working dir for the process.', | |||
|
90 | default=NoConfigDefault, | |||
|
91 | metavar='Global.work_dir') | |||
|
92 | parent_parser2.add_argument('--log-to-file', | |||
|
93 | action='store_true', dest='Global.log_to_file', | |||
|
94 | default=NoConfigDefault, | |||
|
95 | help='Log to a file in the log directory (default is stdout)' | |||
|
96 | ) | |||
|
97 | ||||
|
98 | subparsers = self.parser.add_subparsers( | |||
|
99 | dest='Global.subcommand', | |||
|
100 | title='ipcluster subcommands', | |||
|
101 | description='ipcluster has a variety of subcommands. ' | |||
|
102 | 'The general way of running ipcluster is "ipcluster <cmd> ' | |||
|
103 | ' [options]""', | |||
|
104 | help='For more help, type "ipcluster <cmd> -h"') | |||
|
105 | ||||
|
106 | parser_list = subparsers.add_parser( | |||
|
107 | 'list', | |||
|
108 | help='List all clusters in cwd and ipython_dir.', | |||
|
109 | parents=[parent_parser1] | |||
|
110 | ) | |||
|
111 | ||||
|
112 | parser_create = subparsers.add_parser( | |||
|
113 | 'create', | |||
|
114 | help='Create a new cluster directory.', | |||
|
115 | parents=[parent_parser1, parent_parser2] | |||
|
116 | ) | |||
|
117 | parser_create.add_argument( | |||
|
118 | '--reset-config', | |||
|
119 | dest='Global.reset_config', action='store_true', | |||
|
120 | default=NoConfigDefault, | |||
|
121 | help='Recopy the default config files to the cluster directory. ' | |||
|
122 | 'You will lose any modifications you have made to these files.' | |||
|
123 | ) | |||
|
124 | ||||
|
125 | parser_start = subparsers.add_parser( | |||
|
126 | 'start', | |||
|
127 | help='Start a cluster.', | |||
|
128 | parents=[parent_parser1, parent_parser2] | |||
|
129 | ) | |||
|
130 | parser_start.add_argument( | |||
|
131 | '-n', '--number', | |||
|
132 | type=int, dest='Global.n', | |||
|
133 | default=NoConfigDefault, | |||
|
134 | help='The number of engines to start.', | |||
|
135 | metavar='Global.n' | |||
|
136 | ) | |||
|
137 | parser_start.add_argument('--clean-logs', | |||
|
138 | dest='Global.clean_logs', action='store_true', | |||
|
139 | help='Delete old log files before starting.', | |||
|
140 | default=NoConfigDefault | |||
|
141 | ) | |||
|
142 | parser_start.add_argument('--no-clean-logs', | |||
|
143 | dest='Global.clean_logs', action='store_false', | |||
|
144 | help="Don't delete old log files before starting.", | |||
|
145 | default=NoConfigDefault | |||
|
146 | ) | |||
|
147 | parser_start.add_argument('--daemon', | |||
|
148 | dest='Global.daemonize', action='store_true', | |||
|
149 | help='Daemonize the ipcluster program. This implies --log-to-file', | |||
|
150 | default=NoConfigDefault | |||
|
151 | ) | |||
|
152 | parser_start.add_argument('--no-daemon', | |||
|
153 | dest='Global.daemonize', action='store_false', | |||
|
154 | help="Don't daemonize the ipcluster program.", | |||
|
155 | default=NoConfigDefault | |||
|
156 | ) | |||
|
157 | ||||
|
158 | parser_start = subparsers.add_parser( | |||
|
159 | 'stop', | |||
|
160 | help='Stop a cluster.', | |||
|
161 | parents=[parent_parser1, parent_parser2] | |||
|
162 | ) | |||
|
163 | parser_start.add_argument('--signal', | |||
|
164 | dest='Global.signal', type=int, | |||
|
165 | help="The signal number to use in stopping the cluster (default=2).", | |||
|
166 | metavar="Global.signal", | |||
|
167 | default=NoConfigDefault | |||
|
168 | ) | |||
|
169 | ||||
|
170 | ||||
|
# Default name of the config file written into/read from a cluster dir.
default_config_file_name = u'ipcluster_config.py'


# Usage text shown by "ipcluster --help".
_description = """Start an IPython cluster for parallel computing.\n\n

An IPython cluster consists of 1 controller and 1 or more engines.
This command automates the startup of these processes using a wide
range of startup methods (SSH, local processes, PBS, mpiexec,
Windows HPC Server 2008). To start a cluster with 4 engines on your
local host simply do "ipcluster start -n 4". For more complex usage
you will typically do "ipcluster create -p mycluster", then edit
configuration files, followed by "ipcluster start -p mycluster -n 4".
"""
|
184 | ||||
|
185 | ||||
|
class IPClusterApp(ApplicationWithClusterDir):
    """Application that starts, stops and lists IPython parallel clusters."""

    name = u'ipcluster'
    description = _description
    config_file_name = default_config_file_name
    default_log_level = logging.INFO
    # Whether to auto-create the cluster dir is decided per-subcommand
    # in find_resources(), so it starts out False here.
    auto_create_cluster_dir = False
|
193 | ||||
|
194 | def create_default_config(self): | |||
|
195 | super(IPClusterApp, self).create_default_config() | |||
|
196 | self.default_config.Global.controller_launcher = \ | |||
|
197 | 'IPython.kernel.launcher.LocalControllerLauncher' | |||
|
198 | self.default_config.Global.engine_launcher = \ | |||
|
199 | 'IPython.kernel.launcher.LocalEngineSetLauncher' | |||
|
200 | self.default_config.Global.n = 2 | |||
|
201 | self.default_config.Global.reset_config = False | |||
|
202 | self.default_config.Global.clean_logs = True | |||
|
203 | self.default_config.Global.signal = 2 | |||
|
204 | self.default_config.Global.daemonize = False | |||
|
205 | ||||
|
206 | def create_command_line_config(self): | |||
|
207 | """Create and return a command line config loader.""" | |||
|
208 | return IPClusterCLLoader( | |||
|
209 | description=self.description, | |||
|
210 | version=release.version | |||
|
211 | ) | |||
|
212 | ||||
|
    def find_resources(self):
        """Dispatch cluster-dir resolution based on the subcommand.

        'list' prints known cluster dirs and exits immediately; 'create'
        auto-creates the cluster dir; 'start'/'stop' re-raise a friendlier
        error when the cluster dir cannot be resolved.
        """
        subcommand = self.command_line_config.Global.subcommand
        if subcommand=='list':
            self.list_cluster_dirs()
            # Exit immediately because there is nothing left to do.
            self.exit()
        elif subcommand=='create':
            self.auto_create_cluster_dir = True
            super(IPClusterApp, self).find_resources()
        elif subcommand=='start' or subcommand=='stop':
            # NOTE(review): auto-create is enabled here too, although the
            # error message below implies the dir must pre-exist -- confirm
            # which behavior is intended.
            self.auto_create_cluster_dir = True
            try:
                super(IPClusterApp, self).find_resources()
            except ClusterDirError:
                raise ClusterDirError(
                    "Could not find a cluster directory. A cluster dir must "
                    "be created before running 'ipcluster start'. Do "
                    "'ipcluster create -h' or 'ipcluster list -h' for more "
                    "information about creating and listing cluster dirs."
                )
|
233 | ||||
|
234 | def list_cluster_dirs(self): | |||
|
235 | # Find the search paths | |||
|
236 | cluster_dir_paths = os.environ.get('IPCLUSTER_DIR_PATH','') | |||
|
237 | if cluster_dir_paths: | |||
|
238 | cluster_dir_paths = cluster_dir_paths.split(':') | |||
|
239 | else: | |||
|
240 | cluster_dir_paths = [] | |||
|
241 | try: | |||
|
242 | ipython_dir = self.command_line_config.Global.ipython_dir | |||
|
243 | except AttributeError: | |||
|
244 | ipython_dir = self.default_config.Global.ipython_dir | |||
|
245 | paths = [os.getcwd(), ipython_dir] + \ | |||
|
246 | cluster_dir_paths | |||
|
247 | paths = list(set(paths)) | |||
|
248 | ||||
|
249 | self.log.info('Searching for cluster dirs in paths: %r' % paths) | |||
|
250 | for path in paths: | |||
|
251 | files = os.listdir(path) | |||
|
252 | for f in files: | |||
|
253 | full_path = os.path.join(path, f) | |||
|
254 | if os.path.isdir(full_path) and f.startswith('cluster_'): | |||
|
255 | profile = full_path.split('_')[-1] | |||
|
256 | start_cmd = 'ipcluster start -p %s -n 4' % profile | |||
|
257 | print start_cmd + " ==> " + full_path | |||
|
258 | ||||
|
    def pre_construct(self):
        """Force log_to_file when daemonizing (a daemon has no stdout)."""
        # IPClusterApp.pre_construct() is where we cd to the working directory.
        super(IPClusterApp, self).pre_construct()
        config = self.master_config
        try:
            daemon = config.Global.daemonize
            if daemon:
                config.Global.log_to_file = True
        except AttributeError:
            # daemonize is not set for this subcommand; nothing to do.
            pass
|
269 | ||||
|
    def construct(self):
        """Per-subcommand construction; 'start' schedules the launchers."""
        config = self.master_config
        subcmd = config.Global.subcommand
        reset = config.Global.reset_config
        if subcmd == 'list':
            # Listing already happened in find_resources().
            return
        if subcmd == 'create':
            self.log.info('Copying default config files to cluster directory '
                          '[overwrite=%r]' % (reset,))
            self.cluster_dir_obj.copy_all_config_files(overwrite=reset)
        if subcmd =='start':
            # Never clobber user edits when merely starting.
            self.cluster_dir_obj.copy_all_config_files(overwrite=False)
            self.start_logging()
            # Defer launcher startup until the reactor is running.
            reactor.callWhenRunning(self.start_launchers)
|
284 | ||||
|
    def start_launchers(self):
        """Instantiate and start the controller and engine launchers.

        Returns a Deferred that fires once the controller and engines have
        started; any startup error triggers a full shutdown via
        stop_launchers().
        """
        config = self.master_config

        # Create the launchers. In both cases, we set the work_dir of
        # the launcher to the cluster_dir. This is where the launcher's
        # subprocesses will be launched. It is not where the controller
        # and engine will be launched.
        el_class = import_item(config.Global.engine_launcher)
        self.engine_launcher = el_class(
            work_dir=self.cluster_dir, config=config
        )
        cl_class = import_item(config.Global.controller_launcher)
        self.controller_launcher = cl_class(
            work_dir=self.cluster_dir, config=config
        )

        # Setup signals
        signal.signal(signal.SIGINT, self.sigint_handler)

        # Setup the observing of stopping. If the controller dies, shut
        # everything down as that will be completely fatal for the engines.
        d1 = self.controller_launcher.observe_stop()
        d1.addCallback(self.stop_launchers)
        # But, we don't monitor the stopping of engines. An engine dying
        # is just fine and in principle a user could start a new engine.
        # Also, if we did monitor engine stopping, it is difficult to
        # know what to do when only some engines die. Currently, the
        # observing of engine stopping is inconsistent. Some launchers
        # might trigger on a single engine stopping, other wait until
        # all stop. TODO: think more about how to handle this.

        # Start the controller and engines
        self._stopping = False # Make sure stop_launchers is not called 2x.
        d = self.start_controller()
        d.addCallback(self.start_engines)
        d.addCallback(self.startup_message)
        # If the controller or engines fail to start, stop everything
        d.addErrback(self.stop_launchers)
        return d
|
324 | ||||
|
    def startup_message(self, r=None):
        """Log successful startup; pass the callback result through."""
        log.msg("IPython cluster: started")
        return r

    def start_controller(self, r=None):
        """Start the controller launcher; return its Deferred."""
        # log.msg("In start_controller")
        config = self.master_config
        d = self.controller_launcher.start(
            cluster_dir=config.Global.cluster_dir
        )
        return d

    def start_engines(self, r=None):
        """Start Global.n engines; return the launcher's Deferred."""
        # log.msg("In start_engines")
        config = self.master_config
        d = self.engine_launcher.start(
            config.Global.n,
            cluster_dir=config.Global.cluster_dir
        )
        return d

    def stop_controller(self, r=None):
        """Stop the controller if running; always return a Deferred."""
        # log.msg("In stop_controller")
        if self.controller_launcher.running:
            d = self.controller_launcher.stop()
            d.addErrback(self.log_err)
            return d
        else:
            return defer.succeed(None)

    def stop_engines(self, r=None):
        """Stop the engines if running; always return a Deferred."""
        # log.msg("In stop_engines")
        if self.engine_launcher.running:
            d = self.engine_launcher.stop()
            d.addErrback(self.log_err)
            return d
        else:
            return defer.succeed(None)

    def log_err(self, f):
        """Errback: log the failure's traceback and swallow it."""
        log.msg(f.getTraceback())
        return None
|
367 | ||||
|
    def stop_launchers(self, r=None):
        """Shut the whole cluster down (idempotent via self._stopping)."""
        if not self._stopping:
            self._stopping = True
            if isinstance(r, failure.Failure):
                log.msg('Unexpected error in ipcluster:')
                log.msg(r.getTraceback())
            log.msg("IPython cluster: stopping")
            # NOTE(review): these Deferreds are not chained to the reactor
            # shutdown below; we rely on the fixed delay -- confirm 4s is
            # always enough for the launchers to stop.
            d= self.stop_engines()
            d2 = self.stop_controller()
            # Wait a few seconds to let things shut down.
            reactor.callLater(4.0, reactor.stop)

    def sigint_handler(self, signum, frame):
        # SIGINT triggers the same orderly shutdown path as a failure.
        self.stop_launchers()
|
382 | ||||
|
383 | def start_logging(self): | |||
|
384 | # Remove old log files of the controller and engine | |||
|
385 | if self.master_config.Global.clean_logs: | |||
|
386 | log_dir = self.master_config.Global.log_dir | |||
|
387 | for f in os.listdir(log_dir): | |||
|
388 | if f.startswith('ipengine' + '-'): | |||
|
389 | if f.endswith('.log') or f.endswith('.out') or f.endswith('.err'): | |||
|
390 | os.remove(os.path.join(log_dir, f)) | |||
|
391 | if f.startswith('ipcontroller' + '-'): | |||
|
392 | if f.endswith('.log') or f.endswith('.out') or f.endswith('.err'): | |||
|
393 | os.remove(os.path.join(log_dir, f)) | |||
|
394 | # This will remote old log files for ipcluster itself | |||
|
395 | super(IPClusterApp, self).start_logging() | |||
|
396 | ||||
|
397 | def start_app(self): | |||
|
398 | """Start the application, depending on what subcommand is used.""" | |||
|
399 | subcmd = self.master_config.Global.subcommand | |||
|
400 | if subcmd=='create' or subcmd=='list': | |||
|
401 | return | |||
|
402 | elif subcmd=='start': | |||
|
403 | self.start_app_start() | |||
|
404 | elif subcmd=='stop': | |||
|
405 | self.start_app_stop() | |||
|
406 | ||||
|
    def start_app_start(self):
        """Start the app for the start subcommand."""
        config = self.master_config
        # First see if the cluster is already running
        try:
            pid = self.get_pid_from_file()
        except PIDFileError:
            # No pid file: nothing is running, proceed with startup.
            pass
        else:
            self.log.critical(
                'Cluster is already running with [pid=%s]. '
                'use "ipcluster stop" to stop the cluster.' % pid
            )
            # Here I exit with a unusual exit status that other processes
            # can watch for to learn how I exited.
            self.exit(ALREADY_STARTED)

        # Now log and daemonize
        self.log.info(
            'Starting ipcluster with [daemon=%r]' % config.Global.daemonize
        )
        # TODO: Get daemonize working on Windows or as a Windows Server.
        if config.Global.daemonize:
            if os.name=='posix':
                daemonize()

        # Now write the new pid file AFTER our new forked pid is active.
        self.write_pid_file()
        # Remove the pid file on reactor shutdown, then run the reactor.
        reactor.addSystemEventTrigger('during','shutdown', self.remove_pid_file)
        reactor.run()
|
437 | ||||
|
    def start_app_stop(self):
        """Start the app for the stop subcommand."""
        config = self.master_config
        try:
            pid = self.get_pid_from_file()
        except PIDFileError:
            self.log.critical(
                'Problem reading pid file, cluster is probably not running.'
            )
            # Here I exit with a unusual exit status that other processes
            # can watch for to learn how I exited.
            self.exit(ALREADY_STOPPED)
        else:
            if os.name=='posix':
                sig = config.Global.signal
                self.log.info(
                    "Stopping cluster [pid=%r] with [signal=%r]" % (pid, sig)
                )
                os.kill(pid, sig)
            elif os.name=='nt':
                # As of right now, we don't support daemonize on Windows, so
                # stop will not do anything. Minimally, it should clean up the
                # old .pid files.
                self.remove_pid_file()
|
462 | ||||
|
def launch_new_instance():
    """Create and run the IPython cluster."""
    application = IPClusterApp()
    application.start()


if __name__ == '__main__':
    launch_new_instance()
|
471 |
@@ -0,0 +1,275 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | The IPython controller application. | |||
|
5 | """ | |||
|
6 | ||||
|
7 | #----------------------------------------------------------------------------- | |||
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
9 | # | |||
|
10 | # Distributed under the terms of the BSD License. The full license is in | |||
|
11 | # the file COPYING, distributed as part of this software. | |||
|
12 | #----------------------------------------------------------------------------- | |||
|
13 | ||||
|
14 | #----------------------------------------------------------------------------- | |||
|
15 | # Imports | |||
|
16 | #----------------------------------------------------------------------------- | |||
|
17 | ||||
|
18 | from __future__ import with_statement | |||
|
19 | ||||
|
20 | import copy | |||
|
21 | import os | |||
|
22 | import sys | |||
|
23 | ||||
|
24 | from twisted.application import service | |||
|
25 | from twisted.internet import reactor | |||
|
26 | from twisted.python import log | |||
|
27 | ||||
|
28 | from IPython.config.loader import Config, NoConfigDefault | |||
|
29 | ||||
|
30 | from IPython.kernel.clusterdir import ( | |||
|
31 | ApplicationWithClusterDir, | |||
|
32 | AppWithClusterDirArgParseConfigLoader | |||
|
33 | ) | |||
|
34 | ||||
|
35 | from IPython.core import release | |||
|
36 | ||||
|
37 | from IPython.utils.traitlets import Str, Instance, Unicode | |||
|
38 | ||||
|
39 | from IPython.kernel import controllerservice | |||
|
40 | ||||
|
41 | from IPython.kernel.fcutil import FCServiceFactory | |||
|
42 | ||||
|
43 | #----------------------------------------------------------------------------- | |||
|
44 | # Default interfaces | |||
|
45 | #----------------------------------------------------------------------------- | |||
|
46 | ||||
|
47 | ||||
|
48 | # The default client interfaces for FCClientServiceFactory.interfaces | |||
|
# The default client interfaces for FCClientServiceFactory.interfaces
default_client_interfaces = Config()
default_client_interfaces.Task.interface_chain = [
    'IPython.kernel.task.ITaskController',
    'IPython.kernel.taskfc.IFCTaskController'
]

# NOTE(review): plain str here but u'' for the other furl_file values
# below -- confirm the inconsistency is intentional.
default_client_interfaces.Task.furl_file = 'ipcontroller-tc.furl'

default_client_interfaces.MultiEngine.interface_chain = [
    'IPython.kernel.multiengine.IMultiEngine',
    'IPython.kernel.multienginefc.IFCSynchronousMultiEngine'
]

default_client_interfaces.MultiEngine.furl_file = u'ipcontroller-mec.furl'

# Make this a dict we can pass to Config.__init__ for the default
default_client_interfaces = dict(copy.deepcopy(default_client_interfaces.items()))



# The default engine interfaces for FCEngineServiceFactory.interfaces
default_engine_interfaces = Config()
default_engine_interfaces.Default.interface_chain = [
    'IPython.kernel.enginefc.IFCControllerBase'
]

default_engine_interfaces.Default.furl_file = u'ipcontroller-engine.furl'

# Make this a dict we can pass to Config.__init__ for the default
default_engine_interfaces = dict(copy.deepcopy(default_engine_interfaces.items()))
|
79 | ||||
|
80 | ||||
|
81 | #----------------------------------------------------------------------------- | |||
|
82 | # Service factories | |||
|
83 | #----------------------------------------------------------------------------- | |||
|
84 | ||||
|
85 | ||||
|
class FCClientServiceFactory(FCServiceFactory):
    """A Foolscap implementation of the client services."""

    cert_file = Unicode(u'ipcontroller-client.pem', config=True)
    interfaces = Instance(klass=Config, kw=default_client_interfaces,
                          allow_none=False, config=True)


class FCEngineServiceFactory(FCServiceFactory):
    """A Foolscap implementation of the engine services."""

    cert_file = Unicode(u'ipcontroller-engine.pem', config=True)
    # NOTE(review): klass=dict here but klass=Config in
    # FCClientServiceFactory above, even though both defaults are plain
    # dicts by this point -- confirm which klass is intended.
    interfaces = Instance(klass=dict, kw=default_engine_interfaces,
                          allow_none=False, config=True)
|
100 | ||||
|
101 | ||||
|
102 | #----------------------------------------------------------------------------- | |||
|
103 | # The main application | |||
|
104 | #----------------------------------------------------------------------------- | |||
|
105 | ||||
|
106 | ||||
|
107 | cl_args = ( | |||
|
108 | # Client config | |||
|
109 | (('--client-ip',), dict( | |||
|
110 | type=str, dest='FCClientServiceFactory.ip', default=NoConfigDefault, | |||
|
111 | help='The IP address or hostname the controller will listen on for ' | |||
|
112 | 'client connections.', | |||
|
113 | metavar='FCClientServiceFactory.ip') | |||
|
114 | ), | |||
|
115 | (('--client-port',), dict( | |||
|
116 | type=int, dest='FCClientServiceFactory.port', default=NoConfigDefault, | |||
|
117 | help='The port the controller will listen on for client connections. ' | |||
|
118 | 'The default is to use 0, which will autoselect an open port.', | |||
|
119 | metavar='FCClientServiceFactory.port') | |||
|
120 | ), | |||
|
121 | (('--client-location',), dict( | |||
|
122 | type=str, dest='FCClientServiceFactory.location', default=NoConfigDefault, | |||
|
123 | help='The hostname or IP that clients should connect to. This does ' | |||
|
124 | 'not control which interface the controller listens on. Instead, this ' | |||
|
125 | 'determines the hostname/IP that is listed in the FURL, which is how ' | |||
|
126 | 'clients know where to connect. Useful if the controller is listening ' | |||
|
127 | 'on multiple interfaces.', | |||
|
128 | metavar='FCClientServiceFactory.location') | |||
|
129 | ), | |||
|
130 | # Engine config | |||
|
131 | (('--engine-ip',), dict( | |||
|
132 | type=str, dest='FCEngineServiceFactory.ip', default=NoConfigDefault, | |||
|
133 | help='The IP address or hostname the controller will listen on for ' | |||
|
134 | 'engine connections.', | |||
|
135 | metavar='FCEngineServiceFactory.ip') | |||
|
136 | ), | |||
|
137 | (('--engine-port',), dict( | |||
|
138 | type=int, dest='FCEngineServiceFactory.port', default=NoConfigDefault, | |||
|
139 | help='The port the controller will listen on for engine connections. ' | |||
|
140 | 'The default is to use 0, which will autoselect an open port.', | |||
|
141 | metavar='FCEngineServiceFactory.port') | |||
|
142 | ), | |||
|
143 | (('--engine-location',), dict( | |||
|
144 | type=str, dest='FCEngineServiceFactory.location', default=NoConfigDefault, | |||
|
145 | help='The hostname or IP that engines should connect to. This does ' | |||
|
146 | 'not control which interface the controller listens on. Instead, this ' | |||
|
147 | 'determines the hostname/IP that is listed in the FURL, which is how ' | |||
|
148 | 'engines know where to connect. Useful if the controller is listening ' | |||
|
149 | 'on multiple interfaces.', | |||
|
150 | metavar='FCEngineServiceFactory.location') | |||
|
151 | ), | |||
|
152 | # Global config | |||
|
153 | (('--log-to-file',), dict( | |||
|
154 | action='store_true', dest='Global.log_to_file', default=NoConfigDefault, | |||
|
155 | help='Log to a file in the log directory (default is stdout)') | |||
|
156 | ), | |||
|
157 | (('-r','--reuse-furls'), dict( | |||
|
158 | action='store_true', dest='Global.reuse_furls', default=NoConfigDefault, | |||
|
159 | help='Try to reuse all FURL files. If this is not set all FURL files ' | |||
|
160 | 'are deleted before the controller starts. This must be set if ' | |||
|
161 | 'specific ports are specified by --engine-port or --client-port.') | |||
|
162 | ), | |||
|
163 | (('--no-secure',), dict( | |||
|
164 | action='store_false', dest='Global.secure', default=NoConfigDefault, | |||
|
165 | help='Turn off SSL encryption for all connections.') | |||
|
166 | ), | |||
|
167 | (('--secure',), dict( | |||
|
168 | action='store_true', dest='Global.secure', default=NoConfigDefault, | |||
|
169 | help='Turn off SSL encryption for all connections.') | |||
|
170 | ) | |||
|
171 | ) | |||
|
172 | ||||
|
173 | ||||
|
174 | class IPControllerAppCLConfigLoader(AppWithClusterDirArgParseConfigLoader): | |||
|
175 | ||||
|
176 | arguments = cl_args | |||
|
177 | ||||
|
178 | ||||
|
179 | _description = """Start the IPython controller for parallel computing. | |||
|
180 | ||||
|
181 | The IPython controller provides a gateway between the IPython engines and | |||
|
182 | clients. The controller needs to be started before the engines and can be | |||
|
183 | configured using command line options or using a cluster directory. Cluster | |||
|
184 | directories contain config, log and security files and are usually located in | |||
|
185 | your .ipython directory and named as "cluster_<profile>". See the --profile | |||
|
186 | and --cluster-dir options for details. | |||
|
187 | """ | |||
|
188 | ||||
|
189 | default_config_file_name = u'ipcontroller_config.py' | |||
|
190 | ||||
|
191 | ||||
|
192 | class IPControllerApp(ApplicationWithClusterDir): | |||
|
193 | ||||
|
194 | name = u'ipcontroller' | |||
|
195 | description = _description | |||
|
196 | config_file_name = default_config_file_name | |||
|
197 | auto_create_cluster_dir = True | |||
|
198 | ||||
|
199 | def create_default_config(self): | |||
|
200 | super(IPControllerApp, self).create_default_config() | |||
|
201 | self.default_config.Global.reuse_furls = False | |||
|
202 | self.default_config.Global.secure = True | |||
|
203 | self.default_config.Global.import_statements = [] | |||
|
204 | self.default_config.Global.clean_logs = True | |||
|
205 | ||||
|
206 | def create_command_line_config(self): | |||
|
207 | """Create and return a command line config loader.""" | |||
|
208 | return IPControllerAppCLConfigLoader( | |||
|
209 | description=self.description, | |||
|
210 | version=release.version | |||
|
211 | ) | |||
|
212 | ||||
|
213 | def post_load_command_line_config(self): | |||
|
214 | # Now setup reuse_furls | |||
|
215 | c = self.command_line_config | |||
|
216 | if hasattr(c.Global, 'reuse_furls'): | |||
|
217 | c.FCClientServiceFactory.reuse_furls = c.Global.reuse_furls | |||
|
218 | c.FCEngineServiceFactory.reuse_furls = c.Global.reuse_furls | |||
|
219 | del c.Global.reuse_furls | |||
|
220 | if hasattr(c.Global, 'secure'): | |||
|
221 | c.FCClientServiceFactory.secure = c.Global.secure | |||
|
222 | c.FCEngineServiceFactory.secure = c.Global.secure | |||
|
223 | del c.Global.secure | |||
|
224 | ||||
|
225 | def construct(self): | |||
|
226 | # This is the working dir by now. | |||
|
227 | sys.path.insert(0, '') | |||
|
228 | ||||
|
229 | self.start_logging() | |||
|
230 | self.import_statements() | |||
|
231 | ||||
|
232 | # Create the service hierarchy | |||
|
233 | self.main_service = service.MultiService() | |||
|
234 | # The controller service | |||
|
235 | controller_service = controllerservice.ControllerService() | |||
|
236 | controller_service.setServiceParent(self.main_service) | |||
|
237 | # The client tub and all its refereceables | |||
|
238 | csfactory = FCClientServiceFactory(self.master_config, controller_service) | |||
|
239 | client_service = csfactory.create() | |||
|
240 | client_service.setServiceParent(self.main_service) | |||
|
241 | # The engine tub | |||
|
242 | esfactory = FCEngineServiceFactory(self.master_config, controller_service) | |||
|
243 | engine_service = esfactory.create() | |||
|
244 | engine_service.setServiceParent(self.main_service) | |||
|
245 | ||||
|
246 | def import_statements(self): | |||
|
247 | statements = self.master_config.Global.import_statements | |||
|
248 | for s in statements: | |||
|
249 | try: | |||
|
250 | log.msg("Executing statement: '%s'" % s) | |||
|
251 | exec s in globals(), locals() | |||
|
252 | except: | |||
|
253 | log.msg("Error running statement: %s" % s) | |||
|
254 | ||||
|
255 | def start_app(self): | |||
|
256 | # Start the controller service. | |||
|
257 | self.main_service.startService() | |||
|
258 | # Write the .pid file overwriting old ones. This allow multiple | |||
|
259 | # controllers to clober each other. But Windows is not cleaning | |||
|
260 | # these up properly. | |||
|
261 | self.write_pid_file(overwrite=True) | |||
|
262 | # Add a trigger to delete the .pid file upon shutting down. | |||
|
263 | reactor.addSystemEventTrigger('during','shutdown', self.remove_pid_file) | |||
|
264 | reactor.run() | |||
|
265 | ||||
|
266 | ||||
|
267 | def launch_new_instance(): | |||
|
268 | """Create and run the IPython controller""" | |||
|
269 | app = IPControllerApp() | |||
|
270 | app.start() | |||
|
271 | ||||
|
272 | ||||
|
273 | if __name__ == '__main__': | |||
|
274 | launch_new_instance() | |||
|
275 |
@@ -0,0 +1,248 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | The IPython controller application | |||
|
5 | """ | |||
|
6 | ||||
|
7 | #----------------------------------------------------------------------------- | |||
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
9 | # | |||
|
10 | # Distributed under the terms of the BSD License. The full license is in | |||
|
11 | # the file COPYING, distributed as part of this software. | |||
|
12 | #----------------------------------------------------------------------------- | |||
|
13 | ||||
|
14 | #----------------------------------------------------------------------------- | |||
|
15 | # Imports | |||
|
16 | #----------------------------------------------------------------------------- | |||
|
17 | ||||
|
18 | import os | |||
|
19 | import sys | |||
|
20 | ||||
|
21 | from twisted.application import service | |||
|
22 | from twisted.internet import reactor | |||
|
23 | from twisted.python import log | |||
|
24 | ||||
|
25 | from IPython.config.loader import NoConfigDefault | |||
|
26 | ||||
|
27 | from IPython.kernel.clusterdir import ( | |||
|
28 | ApplicationWithClusterDir, | |||
|
29 | AppWithClusterDirArgParseConfigLoader | |||
|
30 | ) | |||
|
31 | from IPython.core import release | |||
|
32 | ||||
|
33 | from IPython.utils.importstring import import_item | |||
|
34 | ||||
|
35 | from IPython.kernel.engineservice import EngineService | |||
|
36 | from IPython.kernel.fcutil import Tub | |||
|
37 | from IPython.kernel.engineconnector import EngineConnector | |||
|
38 | ||||
|
39 | #----------------------------------------------------------------------------- | |||
|
40 | # The main application | |||
|
41 | #----------------------------------------------------------------------------- | |||
|
42 | ||||
|
43 | ||||
|
44 | cl_args = ( | |||
|
45 | # Controller config | |||
|
46 | (('--furl-file',), dict( | |||
|
47 | type=unicode, dest='Global.furl_file', default=NoConfigDefault, | |||
|
48 | help='The full location of the file containing the FURL of the ' | |||
|
49 | 'controller. If this is not given, the FURL file must be in the ' | |||
|
50 | 'security directory of the cluster directory. This location is ' | |||
|
51 | 'resolved using the --profile and --app-dir options.', | |||
|
52 | metavar='Global.furl_file') | |||
|
53 | ), | |||
|
54 | # MPI | |||
|
55 | (('--mpi',), dict( | |||
|
56 | type=str, dest='MPI.use', default=NoConfigDefault, | |||
|
57 | help='How to enable MPI (mpi4py, pytrilinos, or empty string to disable).', | |||
|
58 | metavar='MPI.use') | |||
|
59 | ), | |||
|
60 | # Global config | |||
|
61 | (('--log-to-file',), dict( | |||
|
62 | action='store_true', dest='Global.log_to_file', default=NoConfigDefault, | |||
|
63 | help='Log to a file in the log directory (default is stdout)') | |||
|
64 | ) | |||
|
65 | ) | |||
|
66 | ||||
|
67 | ||||
|
68 | class IPEngineAppCLConfigLoader(AppWithClusterDirArgParseConfigLoader): | |||
|
69 | ||||
|
70 | arguments = cl_args | |||
|
71 | ||||
|
72 | ||||
|
73 | mpi4py_init = """from mpi4py import MPI as mpi | |||
|
74 | mpi.size = mpi.COMM_WORLD.Get_size() | |||
|
75 | mpi.rank = mpi.COMM_WORLD.Get_rank() | |||
|
76 | """ | |||
|
77 | ||||
|
78 | pytrilinos_init = """from PyTrilinos import Epetra | |||
|
79 | class SimpleStruct: | |||
|
80 | pass | |||
|
81 | mpi = SimpleStruct() | |||
|
82 | mpi.rank = 0 | |||
|
83 | mpi.size = 0 | |||
|
84 | """ | |||
|
85 | ||||
|
86 | ||||
|
87 | default_config_file_name = u'ipengine_config.py' | |||
|
88 | ||||
|
89 | ||||
|
90 | _description = """Start an IPython engine for parallel computing.\n\n | |||
|
91 | ||||
|
92 | IPython engines run in parallel and perform computations on behalf of a client | |||
|
93 | and controller. A controller needs to be started before the engines. The | |||
|
94 | engine can be configured using command line options or using a cluster | |||
|
95 | directory. Cluster directories contain config, log and security files and are | |||
|
96 | usually located in your .ipython directory and named as "cluster_<profile>". | |||
|
97 | See the --profile and --cluster-dir options for details. | |||
|
98 | """ | |||
|
99 | ||||
|
100 | ||||
|
101 | class IPEngineApp(ApplicationWithClusterDir): | |||
|
102 | ||||
|
103 | name = u'ipengine' | |||
|
104 | description = _description | |||
|
105 | config_file_name = default_config_file_name | |||
|
106 | auto_create_cluster_dir = True | |||
|
107 | ||||
|
108 | def create_default_config(self): | |||
|
109 | super(IPEngineApp, self).create_default_config() | |||
|
110 | ||||
|
111 | # The engine should not clean logs as we don't want to remove the | |||
|
112 | # active log files of other running engines. | |||
|
113 | self.default_config.Global.clean_logs = False | |||
|
114 | ||||
|
115 | # Global config attributes | |||
|
116 | self.default_config.Global.exec_lines = [] | |||
|
117 | self.default_config.Global.shell_class = 'IPython.kernel.core.interpreter.Interpreter' | |||
|
118 | ||||
|
119 | # Configuration related to the controller | |||
|
120 | # This must match the filename (path not included) that the controller | |||
|
121 | # used for the FURL file. | |||
|
122 | self.default_config.Global.furl_file_name = u'ipcontroller-engine.furl' | |||
|
123 | # If given, this is the actual location of the controller's FURL file. | |||
|
124 | # If not, this is computed using the profile, app_dir and furl_file_name | |||
|
125 | self.default_config.Global.furl_file = u'' | |||
|
126 | ||||
|
127 | # The max number of connection attemps and the initial delay between | |||
|
128 | # those attemps. | |||
|
129 | self.default_config.Global.connect_delay = 0.1 | |||
|
130 | self.default_config.Global.connect_max_tries = 15 | |||
|
131 | ||||
|
132 | # MPI related config attributes | |||
|
133 | self.default_config.MPI.use = '' | |||
|
134 | self.default_config.MPI.mpi4py = mpi4py_init | |||
|
135 | self.default_config.MPI.pytrilinos = pytrilinos_init | |||
|
136 | ||||
|
137 | def create_command_line_config(self): | |||
|
138 | """Create and return a command line config loader.""" | |||
|
139 | return IPEngineAppCLConfigLoader( | |||
|
140 | description=self.description, | |||
|
141 | version=release.version | |||
|
142 | ) | |||
|
143 | ||||
|
144 | def post_load_command_line_config(self): | |||
|
145 | pass | |||
|
146 | ||||
|
147 | def pre_construct(self): | |||
|
148 | super(IPEngineApp, self).pre_construct() | |||
|
149 | self.find_cont_furl_file() | |||
|
150 | ||||
|
151 | def find_cont_furl_file(self): | |||
|
152 | """Set the furl file. | |||
|
153 | ||||
|
154 | Here we don't try to actually see if it exists for is valid as that | |||
|
155 | is hadled by the connection logic. | |||
|
156 | """ | |||
|
157 | config = self.master_config | |||
|
158 | # Find the actual controller FURL file | |||
|
159 | if not config.Global.furl_file: | |||
|
160 | try_this = os.path.join( | |||
|
161 | config.Global.cluster_dir, | |||
|
162 | config.Global.security_dir, | |||
|
163 | config.Global.furl_file_name | |||
|
164 | ) | |||
|
165 | config.Global.furl_file = try_this | |||
|
166 | ||||
|
167 | def construct(self): | |||
|
168 | # This is the working dir by now. | |||
|
169 | sys.path.insert(0, '') | |||
|
170 | ||||
|
171 | self.start_mpi() | |||
|
172 | self.start_logging() | |||
|
173 | ||||
|
174 | # Create the underlying shell class and EngineService | |||
|
175 | shell_class = import_item(self.master_config.Global.shell_class) | |||
|
176 | self.engine_service = EngineService(shell_class, mpi=mpi) | |||
|
177 | ||||
|
178 | self.exec_lines() | |||
|
179 | ||||
|
180 | # Create the service hierarchy | |||
|
181 | self.main_service = service.MultiService() | |||
|
182 | self.engine_service.setServiceParent(self.main_service) | |||
|
183 | self.tub_service = Tub() | |||
|
184 | self.tub_service.setServiceParent(self.main_service) | |||
|
185 | # This needs to be called before the connection is initiated | |||
|
186 | self.main_service.startService() | |||
|
187 | ||||
|
188 | # This initiates the connection to the controller and calls | |||
|
189 | # register_engine to tell the controller we are ready to do work | |||
|
190 | self.engine_connector = EngineConnector(self.tub_service) | |||
|
191 | ||||
|
192 | log.msg("Using furl file: %s" % self.master_config.Global.furl_file) | |||
|
193 | ||||
|
194 | reactor.callWhenRunning(self.call_connect) | |||
|
195 | ||||
|
196 | def call_connect(self): | |||
|
197 | d = self.engine_connector.connect_to_controller( | |||
|
198 | self.engine_service, | |||
|
199 | self.master_config.Global.furl_file, | |||
|
200 | self.master_config.Global.connect_delay, | |||
|
201 | self.master_config.Global.connect_max_tries | |||
|
202 | ) | |||
|
203 | ||||
|
204 | def handle_error(f): | |||
|
205 | log.msg('Error connecting to controller. This usually means that ' | |||
|
206 | 'i) the controller was not started, ii) a firewall was blocking ' | |||
|
207 | 'the engine from connecting to the controller or iii) the engine ' | |||
|
208 | ' was not pointed at the right FURL file:') | |||
|
209 | log.msg(f.getErrorMessage()) | |||
|
210 | reactor.callLater(0.1, reactor.stop) | |||
|
211 | ||||
|
212 | d.addErrback(handle_error) | |||
|
213 | ||||
|
214 | def start_mpi(self): | |||
|
215 | global mpi | |||
|
216 | mpikey = self.master_config.MPI.use | |||
|
217 | mpi_import_statement = self.master_config.MPI.get(mpikey, None) | |||
|
218 | if mpi_import_statement is not None: | |||
|
219 | try: | |||
|
220 | self.log.info("Initializing MPI:") | |||
|
221 | self.log.info(mpi_import_statement) | |||
|
222 | exec mpi_import_statement in globals() | |||
|
223 | except: | |||
|
224 | mpi = None | |||
|
225 | else: | |||
|
226 | mpi = None | |||
|
227 | ||||
|
228 | def exec_lines(self): | |||
|
229 | for line in self.master_config.Global.exec_lines: | |||
|
230 | try: | |||
|
231 | log.msg("Executing statement: '%s'" % line) | |||
|
232 | self.engine_service.execute(line) | |||
|
233 | except: | |||
|
234 | log.msg("Error executing statement: %s" % line) | |||
|
235 | ||||
|
236 | def start_app(self): | |||
|
237 | reactor.run() | |||
|
238 | ||||
|
239 | ||||
|
def launch_new_instance():
    """Create and run the IPython engine application."""
    app = IPEngineApp()
    app.start()


if __name__ == '__main__':
    launch_new_instance()
|
248 |
This diff has been collapsed as it changes many lines, (869 lines changed) Show them Hide them | |||||
@@ -0,0 +1,869 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | Facilities for launching IPython processes asynchronously. | |||
|
5 | """ | |||
|
6 | ||||
|
7 | #----------------------------------------------------------------------------- | |||
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
9 | # | |||
|
10 | # Distributed under the terms of the BSD License. The full license is in | |||
|
11 | # the file COPYING, distributed as part of this software. | |||
|
12 | #----------------------------------------------------------------------------- | |||
|
13 | ||||
|
14 | #----------------------------------------------------------------------------- | |||
|
15 | # Imports | |||
|
16 | #----------------------------------------------------------------------------- | |||
|
17 | ||||
|
18 | import os | |||
|
19 | import re | |||
|
20 | import sys | |||
|
21 | ||||
|
22 | from IPython.core.component import Component | |||
|
23 | from IPython.external import Itpl | |||
|
24 | from IPython.utils.traitlets import Str, Int, List, Unicode, Enum | |||
|
25 | from IPython.utils.platutils import find_cmd | |||
|
26 | from IPython.kernel.twistedutil import gatherBoth, make_deferred, sleep_deferred | |||
|
27 | from IPython.kernel.winhpcjob import ( | |||
|
28 | WinHPCJob, WinHPCTask, | |||
|
29 | IPControllerTask, IPEngineTask, | |||
|
30 | IPControllerJob, IPEngineSetJob | |||
|
31 | ) | |||
|
32 | ||||
|
33 | from twisted.internet import reactor, defer | |||
|
34 | from twisted.internet.defer import inlineCallbacks | |||
|
35 | from twisted.internet.protocol import ProcessProtocol | |||
|
36 | from twisted.internet.utils import getProcessOutput | |||
|
37 | from twisted.internet.error import ProcessDone, ProcessTerminated | |||
|
38 | from twisted.python import log | |||
|
39 | from twisted.python.failure import Failure | |||
|
40 | ||||
|
41 | #----------------------------------------------------------------------------- | |||
|
42 | # Utilities | |||
|
43 | #----------------------------------------------------------------------------- | |||
|
44 | ||||
|
45 | ||||
|
def find_controller_cmd():
    """Find the command line ipcontroller program in a cross platform way.

    Returns the command as an argv-style list suitable for spawnProcess.
    """
    if sys.platform != 'win32':
        # ipcontroller has to be on the PATH in this case.
        return ['ipcontroller']
    # On Windows the ipcontroller script doesn't always get installed in
    # the same way or in the same location, so run the module's source file
    # directly with the current interpreter (sys.executable).  The -u flag
    # turns on unbuffered output, which is required on Win32 to prevent
    # weird conflicts and problems with Twisted.
    from IPython.kernel import ipcontrollerapp
    script_location = ipcontrollerapp.__file__.replace('.pyc', '.py')
    return [sys.executable, '-u', script_location]
|
62 | ||||
|
63 | ||||
|
def find_engine_cmd():
    """Find the command line ipengine program in a cross platform way.

    Returns the command as an argv-style list suitable for spawnProcess.
    """
    if sys.platform != 'win32':
        # ipengine has to be on the PATH in this case.
        return ['ipengine']
    # On Windows the ipengine script doesn't always get installed in the
    # same way or in the same location, so run the module's source file
    # directly with the current interpreter (sys.executable).  The -u flag
    # turns on unbuffered output, which is required on Win32 to prevent
    # weird conflicts and problems with Twisted.
    from IPython.kernel import ipengineapp
    script_location = ipengineapp.__file__.replace('.pyc', '.py')
    return [sys.executable, '-u', script_location]
|
80 | ||||
|
81 | ||||
|
82 | #----------------------------------------------------------------------------- | |||
|
83 | # Base launchers and errors | |||
|
84 | #----------------------------------------------------------------------------- | |||
|
85 | ||||
|
86 | ||||
|
class LauncherError(Exception):
    """Base class for all errors raised by process launchers."""
|
89 | ||||
|
90 | ||||
|
class ProcessStateError(LauncherError):
    """Raised when an operation is invalid for the launcher's current state,
    e.g. starting a process that has already been started."""
|
93 | ||||
|
94 | ||||
|
class UnknownStatus(LauncherError):
    """Raised when a process exit status cannot be interpreted."""
|
97 | ||||
|
98 | ||||
|
class BaseLauncher(Component):
    """An abstraction for starting, stopping and signaling a process."""

    # In all of the launchers, the work_dir is where child processes will be
    # run. This will usually be the cluster_dir, but may not be. any work_dir
    # passed into the __init__ method will override the config value.
    # This should not be used to set the work_dir for the actual engine
    # and controller. Instead, use their own config files or the
    # controller_args, engine_args attributes of the launchers to add
    # the --work-dir option.
    work_dir = Unicode(u'')

    def __init__(self, work_dir, parent=None, name=None, config=None):
        super(BaseLauncher, self).__init__(parent, name, config)
        self.work_dir = work_dir
        # Simple lifecycle state machine for the launched process.
        self.state = 'before' # can be before, running, after
        # Deferreds handed out by observe_stop(), fired by notify_stop().
        self.stop_deferreds = []
        # Data passed to notify_start/notify_stop (pid, exit status, ...).
        self.start_data = None
        self.stop_data = None

    @property
    def args(self):
        """A list of cmd and args that will be used to start the process.

        This is what is passed to :func:`spawnProcess` and the first element
        will be the process name.
        """
        return self.find_args()

    def find_args(self):
        """The ``.args`` property calls this to find the args list.

        Subclasses should implement this to construct the cmd and args.
        """
        raise NotImplementedError('find_args must be implemented in a subclass')

    @property
    def arg_str(self):
        """The string form of the program arguments."""
        return ' '.join(self.args)

    @property
    def running(self):
        """Am I running."""
        if self.state == 'running':
            return True
        else:
            return False

    def start(self):
        """Start the process.

        This must return a deferred that fires with information about the
        process starting (like a pid, job id, etc.).
        """
        return defer.fail(
            Failure(NotImplementedError(
                'start must be implemented in a subclass')
            )
        )

    def stop(self):
        """Stop the process and notify observers of stopping.

        This must return a deferred that fires with information about the
        processing stopping, like errors that occur while the process is
        attempting to be shut down. This deferred won't fire when the process
        actually stops. To observe the actual process stopping, see
        :func:`observe_stop`.
        """
        return defer.fail(
            Failure(NotImplementedError(
                'stop must be implemented in a subclass')
            )
        )

    def observe_stop(self):
        """Get a deferred that will fire when the process stops.

        The deferred will fire with data that contains information about
        the exit status of the process.
        """
        # If the process has already stopped, fire immediately with the
        # recorded stop data instead of queueing a deferred.
        if self.state=='after':
            return defer.succeed(self.stop_data)
        else:
            d = defer.Deferred()
            self.stop_deferreds.append(d)
            return d

    def notify_start(self, data):
        """Call this to trigger startup actions.

        This logs the process startup and sets the state to 'running'.  It is
        a pass-through so it can be used as a callback.
        """

        log.msg('Process %r started: %r' % (self.args[0], data))
        self.start_data = data
        self.state = 'running'
        return data

    def notify_stop(self, data):
        """Call this to trigger process stop actions.

        This logs the process stopping and sets the state to 'after'. Call
        this to trigger all the deferreds from :func:`observe_stop`."""

        log.msg('Process %r stopped: %r' % (self.args[0], data))
        self.stop_data = data
        self.state = 'after'
        # Fire (and discard) every pending observe_stop deferred.
        for i in range(len(self.stop_deferreds)):
            d = self.stop_deferreds.pop()
            d.callback(data)
        return data

    def signal(self, sig):
        """Signal the process.

        Return a semi-meaningless deferred after signaling the process.

        Parameters
        ----------
        sig : str or int
            'KILL', 'INT', etc., or any signal number
        """
        return defer.fail(
            Failure(NotImplementedError(
                'signal must be implemented in a subclass')
            )
        )
|
229 | ||||
|
230 | ||||
|
231 | #----------------------------------------------------------------------------- | |||
|
232 | # Local process launchers | |||
|
233 | #----------------------------------------------------------------------------- | |||
|
234 | ||||
|
235 | ||||
|
class LocalProcessLauncherProtocol(ProcessProtocol):
    """A ProcessProtocol to go with the LocalProcessLauncher.

    Translates Twisted process events (spawn, exit, output) into calls on
    the owning launcher's notify_start/notify_stop methods.
    """

    def __init__(self, process_launcher):
        # The LocalProcessLauncher this protocol reports back to.
        self.process_launcher = process_launcher
        self.pid = None

    def connectionMade(self):
        # The child process has been spawned; record its pid and notify.
        self.pid = self.transport.pid
        self.process_launcher.notify_start(self.transport.pid)

    def processEnded(self, status):
        """Translate the exit status into a dict and notify the launcher."""
        value = status.value
        if isinstance(value, ProcessDone):
            # Clean exit: exit code 0, no signal.
            self.process_launcher.notify_stop(
                {'exit_code':0,
                 'signal':None,
                 'status':None,
                 'pid':self.pid
                }
            )
        elif isinstance(value, ProcessTerminated):
            # Non-zero exit code or killed by a signal.
            self.process_launcher.notify_stop(
                {'exit_code':value.exitCode,
                 'signal':value.signal,
                 'status':value.status,
                 'pid':self.pid
                }
            )
        else:
            raise UnknownStatus("Unknown exit status, this is probably a "
                                "bug in Twisted")

    def outReceived(self, data):
        # Forward the child's stdout to the log.
        log.msg(data)

    def errReceived(self, data):
        # Forward the child's stderr to the log as errors.
        log.err(data)
|
274 | ||||
|
275 | ||||
|
class LocalProcessLauncher(BaseLauncher):
    """Start and stop an external process in an asynchronous manner.

    This will launch the external process with a working directory of
    ``self.work_dir``.
    """

    # This is used to construct self.args, which is passed to
    # spawnProcess.
    cmd_and_args = List([])

    def __init__(self, work_dir, parent=None, name=None, config=None):
        super(LocalProcessLauncher, self).__init__(
            work_dir, parent, name, config
        )
        self.process_protocol = None
        # Fired by notify_start() once the child's pid is known.
        self.start_deferred = None

    def find_args(self):
        return self.cmd_and_args

    def start(self):
        """Spawn the process; returns a deferred that fires with its pid.

        Fails with ProcessStateError if the process was already started.
        """
        if self.state == 'before':
            self.process_protocol = LocalProcessLauncherProtocol(self)
            self.start_deferred = defer.Deferred()
            self.process_transport = reactor.spawnProcess(
                self.process_protocol,
                str(self.args[0]), # twisted expects these to be str, not unicode
                [str(a) for a in self.args], # str expected, not unicode
                env=os.environ,
                path=self.work_dir # start in the work_dir
            )
            return self.start_deferred
        else:
            s = 'The process was already started and has state: %r' % self.state
            return defer.fail(ProcessStateError(s))

    def notify_start(self, data):
        super(LocalProcessLauncher, self).notify_start(data)
        # Fire the deferred returned by start() now that the pid is known.
        self.start_deferred.callback(data)

    def stop(self):
        return self.interrupt_then_kill()

    @make_deferred
    def signal(self, sig):
        # Signals are only meaningful while the process is alive; otherwise
        # this is a silent no-op.
        if self.state == 'running':
            self.process_transport.signalProcess(sig)

    @inlineCallbacks
    def interrupt_then_kill(self, delay=2.0):
        """Send INT, wait a delay and then send KILL."""
        yield self.signal('INT')
        yield sleep_deferred(delay)
        yield self.signal('KILL')
|
331 | ||||
|
332 | ||||
|
class LocalControllerLauncher(LocalProcessLauncher):
    """Launch a controller as a regular external process."""

    # The command used to run ipcontroller.
    controller_cmd = List(find_controller_cmd(), config=True)
    # Command line arguments to ipcontroller.
    controller_args = List(['--log-to-file','--log-level', '40'], config=True)

    def find_args(self):
        return self.controller_cmd + self.controller_args

    def start(self, cluster_dir):
        """Start the controller by cluster_dir.

        NOTE(review): this appends to controller_args, so calling start()
        twice on the same instance would duplicate the --cluster-dir
        arguments; launcher instances appear to be single-use.
        """
        self.controller_args.extend(['--cluster-dir', cluster_dir])
        self.cluster_dir = unicode(cluster_dir)
        log.msg("Starting LocalControllerLauncher: %r" % self.args)
        return super(LocalControllerLauncher, self).start()
|
349 | ||||
|
350 | ||||
|
class LocalEngineLauncher(LocalProcessLauncher):
    """Launch a single engine as a regular external process."""

    # The command used to run ipengine.
    engine_cmd = List(find_engine_cmd(), config=True)
    # Command line arguments for ipengine.
    engine_args = List(
        ['--log-to-file','--log-level', '40'], config=True
    )

    def find_args(self):
        return self.engine_cmd + self.engine_args

    def start(self, cluster_dir):
        """Start the engine by cluster_dir.

        NOTE(review): this appends to engine_args, so calling start() twice
        on the same instance would duplicate the --cluster-dir arguments;
        launcher instances appear to be single-use.
        """
        self.engine_args.extend(['--cluster-dir', cluster_dir])
        self.cluster_dir = unicode(cluster_dir)
        return super(LocalEngineLauncher, self).start()
|
368 | ||||
|
369 | ||||
|
class LocalEngineSetLauncher(BaseLauncher):
    """Launch a set of engines as regular external processes.

    Each engine is started through its own :class:`LocalEngineLauncher`;
    this class fans start/stop/signal operations out to all of them.
    """

    # Command line arguments for ipengine.
    engine_args = List(
        ['--log-to-file','--log-level', '40'], config=True
    )

    def __init__(self, work_dir, parent=None, name=None, config=None):
        super(LocalEngineSetLauncher, self).__init__(
            work_dir, parent, name, config
        )
        # The per-engine LocalEngineLauncher instances created by start().
        self.launchers = []

    def start(self, n, cluster_dir):
        """Start n engines by profile or cluster_dir.

        Returns a deferred that fires with the list of per-engine start
        results once all engines have been spawned.
        """
        # Hoisted out of the loop below; re-importing per iteration was
        # wasteful and unidiomatic.
        import copy
        self.cluster_dir = unicode(cluster_dir)
        dlist = []
        for i in range(n):
            el = LocalEngineLauncher(self.work_dir, self)
            # Copy the engine args over to each engine launcher, so a
            # launcher mutating its own args (e.g. appending --cluster-dir)
            # does not affect its siblings.
            el.engine_args = copy.deepcopy(self.engine_args)
            d = el.start(cluster_dir)
            if i==0:
                log.msg("Starting LocalEngineSetLauncher: %r" % el.args)
            self.launchers.append(el)
            dlist.append(d)
        # The consumeErrors here could be dangerous
        dfinal = gatherBoth(dlist, consumeErrors=True)
        dfinal.addCallback(self.notify_start)
        return dfinal

    def find_args(self):
        # There is no single command line for the whole set; this label is
        # only used in log messages.
        return ['engine set']

    def signal(self, sig):
        """Fan the signal out to every engine launcher."""
        dlist = []
        for el in self.launchers:
            d = el.signal(sig)
            dlist.append(d)
        dfinal = gatherBoth(dlist, consumeErrors=True)
        return dfinal

    def interrupt_then_kill(self, delay=1.0):
        """Send INT to all engines, wait ``delay`` seconds, then send KILL."""
        dlist = []
        for el in self.launchers:
            d = el.interrupt_then_kill(delay)
            dlist.append(d)
        dfinal = gatherBoth(dlist, consumeErrors=True)
        return dfinal

    def stop(self):
        return self.interrupt_then_kill()

    def observe_stop(self):
        """Deferred that fires once every engine has stopped."""
        dlist = [el.observe_stop() for el in self.launchers]
        dfinal = gatherBoth(dlist, consumeErrors=False)
        dfinal.addCallback(self.notify_stop)
        return dfinal
|
430 | ||||
|
431 | ||||
|
432 | #----------------------------------------------------------------------------- | |||
|
433 | # MPIExec launchers | |||
|
434 | #----------------------------------------------------------------------------- | |||
|
435 | ||||
|
436 | ||||
|
class MPIExecLauncher(LocalProcessLauncher):
    """Launch an external process using mpiexec."""

    # The mpiexec command to use in starting the process.
    mpi_cmd = List(['mpiexec'], config=True)
    # The command line arguments to pass to mpiexec.
    mpi_args = List([], config=True)
    # The program to start using mpiexec.
    program = List(['date'], config=True)
    # The command line argument to the program.
    program_args = List([], config=True)
    # The number of instances of the program to start.
    n = Int(1, config=True)

    def find_args(self):
        """Build self.args using all the fields."""
        argv = list(self.mpi_cmd)
        argv.extend(['-n', self.n])
        argv.extend(self.mpi_args)
        argv.extend(self.program)
        argv.extend(self.program_args)
        return argv

    def start(self, n):
        """Start n instances of the program using mpiexec."""
        self.n = n
        return super(MPIExecLauncher, self).start()
|
460 | ||||
|
461 | ||||
|
class MPIExecControllerLauncher(MPIExecLauncher):
    """Launch a controller using mpiexec."""

    # The command used to run ipcontroller.
    controller_cmd = List(find_controller_cmd(), config=True)
    # Command line arguments to ipcontroller.
    controller_args = List(['--log-to-file','--log-level', '40'], config=True)
    # Exactly one controller instance is ever started.
    n = Int(1, config=False)

    def start(self, cluster_dir):
        """Start the controller by cluster_dir.

        NOTE(review): this appends to controller_args, so start() is
        effectively single-use per instance.
        """
        self.controller_args.extend(['--cluster-dir', cluster_dir])
        self.cluster_dir = unicode(cluster_dir)
        log.msg("Starting MPIExecControllerLauncher: %r" % self.args)
        return super(MPIExecControllerLauncher, self).start(1)

    def find_args(self):
        """Full mpiexec command line for launching the controller."""
        return self.mpi_cmd + ['-n', self.n] + self.mpi_args + \
               self.controller_cmd + self.controller_args
|
480 | ||||
|
481 | ||||
|
class MPIExecEngineSetLauncher(MPIExecLauncher):
    """Launch a set of engines in a single mpiexec invocation."""

    # The command used to run ipengine.
    engine_cmd = List(find_engine_cmd(), config=True)
    # Command line arguments for ipengine.
    engine_args = List(
        ['--log-to-file','--log-level', '40'], config=True
    )
    # Number of engines; overwritten by start().
    n = Int(1, config=True)

    def start(self, n, cluster_dir):
        """Start n engines by profile or cluster_dir.

        NOTE(review): this appends to engine_args, so start() is
        effectively single-use per instance.
        """
        self.engine_args.extend(['--cluster-dir', cluster_dir])
        self.cluster_dir = unicode(cluster_dir)
        self.n = n
        log.msg('Starting MPIExecEngineSetLauncher: %r' % self.args)
        return super(MPIExecEngineSetLauncher, self).start(n)

    def find_args(self):
        """Full mpiexec command line for launching the engine set."""
        return self.mpi_cmd + ['-n', self.n] + self.mpi_args + \
               self.engine_cmd + self.engine_args
|
502 | ||||
|
503 | ||||
|
504 | #----------------------------------------------------------------------------- | |||
|
505 | # SSH launchers | |||
|
506 | #----------------------------------------------------------------------------- | |||
|
507 | ||||
|
508 | # TODO: Get SSH Launcher working again. | |||
|
509 | ||||
|
class SSHLauncher(BaseLauncher):
    """A minimal launcher for ssh.

    To be useful this will probably have to be extended to use the ``sshx``
    idea for environment variables. There could be other things this needs
    as well.
    """

    ssh_cmd = List(['ssh'], config=True)
    ssh_args = List([], config=True)
    # The remote program to run and its arguments.
    program = List(['date'], config=True)
    program_args = List([], config=True)
    hostname = Str('', config=True)
    user = Str('', config=True)
    # The 'user@hostname' target; kept in sync by the change handlers below.
    location = Str('')

    def _hostname_changed(self, name, old, new):
        # Traitlets change handler: rebuild location when hostname changes.
        self.location = '%s@%s' % (self.user, new)

    def _user_changed(self, name, old, new):
        # Traitlets change handler: rebuild location when user changes.
        self.location = '%s@%s' % (new, self.hostname)

    def find_args(self):
        return self.ssh_cmd + self.ssh_args + [self.location] + \
               self.program + self.program_args

    def start(self, n, hostname=None, user=None):
        # ``n`` is accepted for interface compatibility with the other
        # launchers but is not used here.
        if hostname is not None:
            self.hostname = hostname
        if user is not None:
            self.user = user
        return super(SSHLauncher, self).start()
|
542 | ||||
|
543 | ||||
|
class SSHControllerLauncher(SSHLauncher):
    """Placeholder for launching a controller over ssh (not implemented yet)."""
|
546 | ||||
|
547 | ||||
|
class SSHEngineSetLauncher(BaseLauncher):
    """Placeholder for launching a set of engines over ssh (not implemented yet).

    NOTE(review): unlike SSHControllerLauncher this derives from BaseLauncher
    rather than SSHLauncher — presumably intentional for a future fan-out
    implementation; confirm when the SSH launchers are revived.
    """
|
550 | ||||
|
551 | ||||
|
552 | #----------------------------------------------------------------------------- | |||
|
553 | # Windows HPC Server 2008 scheduler launchers | |||
|
554 | #----------------------------------------------------------------------------- | |||
|
555 | ||||
|
556 | ||||
|
# This is only used on Windows.
def find_job_cmd():
    """Return the Windows HPC ``job`` command.

    On Windows the command is resolved via find_cmd; elsewhere the bare
    name is returned unchanged (it is only ever executed on Windows).
    """
    return find_cmd('job') if os.name == 'nt' else 'job'
|
563 | ||||
|
564 | ||||
|
class WindowsHPCLauncher(BaseLauncher):
    """Base class for launchers that submit work to the Windows HPC
    Server 2008 scheduler through the ``job`` command line tool.

    Subclasses must implement :meth:`write_job_file` to produce the job
    description XML that ``job submit`` consumes.
    """

    # A regular expression used to get the job id from the output of the
    # submit_command.
    job_id_regexp = Str(r'\d+', config=True)
    # The filename of the instantiated job script.
    job_file_name = Unicode(u'ipython_job.xml', config=True)
    # The full path to the instantiated job script. This gets made dynamically
    # by combining the work_dir with the job_file_name.
    # NOTE(review): this trait is shadowed by the ``job_file`` property
    # defined later in the class body, so the trait itself is never active;
    # kept for now to avoid changing the class's attribute surface.
    job_file = Unicode(u'')
    # The hostname of the scheduler to submit the job to
    scheduler = Str('', config=True)
    # The 'job' command; resolved to a full path on Windows.
    job_cmd = Str(find_job_cmd(), config=True)

    def __init__(self, work_dir, parent=None, name=None, config=None):
        super(WindowsHPCLauncher, self).__init__(
            work_dir, parent, name, config
        )

    @property
    def job_file(self):
        # The job XML file lives in the working directory.
        return os.path.join(self.work_dir, self.job_file_name)

    def write_job_file(self, n):
        """Write the job description XML for n processes. Subclass hook."""
        raise NotImplementedError("Implement write_job_file in a subclass.")

    def find_args(self):
        # Only used for log messages; the real command lines are built in
        # start() and stop().
        return ['job.exe']

    def parse_job_id(self, output):
        """Take the output of the submit command and return the job id."""
        m = re.search(self.job_id_regexp, output)
        if m is not None:
            job_id = m.group()
        else:
            raise LauncherError("Job id couldn't be determined: %s" % output)
        self.job_id = job_id
        log.msg('Job started with job id: %r' % job_id)
        return job_id

    @inlineCallbacks
    def start(self, n):
        """Start n copies of the process using the Win HPC job scheduler."""
        self.write_job_file(n)
        args = [
            'submit',
            '/jobfile:%s' % self.job_file,
            '/scheduler:%s' % self.scheduler
        ]
        log.msg("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
        # Twisted will raise DeprecationWarnings if we try to pass unicode to this
        output = yield getProcessOutput(str(self.job_cmd),
            [str(a) for a in args],
            env=dict((str(k),str(v)) for k,v in os.environ.items()),
            path=self.work_dir
        )
        job_id = self.parse_job_id(output)
        self.notify_start(job_id)
        defer.returnValue(job_id)

    @inlineCallbacks
    def stop(self):
        """Cancel the submitted job and notify observers of stopping."""
        args = [
            'cancel',
            self.job_id,
            '/scheduler:%s' % self.scheduler
        ]
        log.msg("Stopping Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
        try:
            # Twisted will raise DeprecationWarnings if we try to pass unicode to this
            output = yield getProcessOutput(str(self.job_cmd),
                [str(a) for a in args],
                env=dict((str(k),str(v)) for k,v in os.environ.items()),
                path=self.work_dir
            )
        except:
            # 'job cancel' fails if the job has already finished; treat that
            # as a successful stop.  (Fixed typo: was "stoppped".)
            output = 'The job already appears to be stopped: %r' % self.job_id
        self.notify_stop(output)  # Pass the output of the kill cmd
        defer.returnValue(output)
|
644 | ||||
|
645 | ||||
|
class WindowsHPCControllerLauncher(WindowsHPCLauncher):
    """Launch an ipcontroller instance via the Windows HPC scheduler."""

    # Name of the generated job description file (created in cluster_dir).
    job_file_name = Unicode(u'ipcontroller_job.xml', config=True)
    # Extra command line arguments; filled in by start(), not user config.
    extra_args = List([], config=False)

    def write_job_file(self, n):
        """Write a single-task controller job XML to ``self.job_file``."""
        job = IPControllerJob(self)

        t = IPControllerTask(self)
        # The tasks work directory is *not* the actual work directory of
        # the controller. It is used as the base path for the stdout/stderr
        # files that the scheduler redirects to.
        t.work_directory = self.cluster_dir
        # Add the --cluster-dir and from self.start().
        t.controller_args.extend(self.extra_args)
        job.add_task(t)

        log.msg("Writing job description file: %s" % self.job_file)
        job.write(self.job_file)

    @property
    def job_file(self):
        # Full path of the job description file inside the cluster dir.
        return os.path.join(self.cluster_dir, self.job_file_name)

    def start(self, cluster_dir):
        """Start the controller by cluster_dir."""
        self.extra_args = ['--cluster-dir', cluster_dir]
        self.cluster_dir = unicode(cluster_dir)
        return super(WindowsHPCControllerLauncher, self).start(1)
|
675 | ||||
|
676 | ||||
|
class WindowsHPCEngineSetLauncher(WindowsHPCLauncher):
    """Launch a set of ipengine instances via the Windows HPC scheduler."""

    # Name of the generated job description file (created in cluster_dir).
    job_file_name = Unicode(u'ipengineset_job.xml', config=True)
    # Extra command line arguments; filled in by start(), not user config.
    extra_args = List([], config=False)

    def write_job_file(self, n):
        """Write a job XML containing one engine task per requested engine."""
        job = IPEngineSetJob(self)

        for i in range(n):
            t = IPEngineTask(self)
            # The tasks work directory is *not* the actual work directory of
            # the engine. It is used as the base path for the stdout/stderr
            # files that the scheduler redirects to.
            t.work_directory = self.cluster_dir
            # Add the --cluster-dir and from self.start().
            t.engine_args.extend(self.extra_args)
            job.add_task(t)

        log.msg("Writing job description file: %s" % self.job_file)
        job.write(self.job_file)

    @property
    def job_file(self):
        # Full path of the job description file inside the cluster dir.
        return os.path.join(self.cluster_dir, self.job_file_name)

    def start(self, n, cluster_dir):
        """Start n engines by cluster_dir."""
        self.extra_args = ['--cluster-dir', cluster_dir]
        self.cluster_dir = unicode(cluster_dir)
        return super(WindowsHPCEngineSetLauncher, self).start(n)
|
707 | ||||
|
708 | ||||
|
709 | #----------------------------------------------------------------------------- | |||
|
710 | # Batch (PBS) system launchers | |||
|
711 | #----------------------------------------------------------------------------- | |||
|
712 | ||||
|
713 | # TODO: Get PBS launcher working again. | |||
|
714 | ||||
|
class BatchSystemLauncher(BaseLauncher):
    """Launch an external process using a batch system.

    This class is designed to work with UNIX batch systems like PBS, LSF,
    GridEngine, etc. The overall model is that there are different commands
    like qsub, qdel, etc. that handle the starting and stopping of the process.

    This class also has the notion of a batch script. The ``batch_template``
    attribute can be set to a string that is a template for the batch script.
    This template is instantiated using Itpl. Thus the template can use
    ${n} for the number of instances. Subclasses can add additional variables
    to the template dict.
    """

    # Subclasses must fill these in. See PBSEngineSet
    # The name of the command line program used to submit jobs.
    submit_command = Str('', config=True)
    # The name of the command line program used to delete jobs.
    delete_command = Str('', config=True)
    # A regular expression used to get the job id from the output of the
    # submit_command.
    job_id_regexp = Str('', config=True)
    # The string that is the batch script template itself.
    batch_template = Str('', config=True)
    # The filename of the instantiated batch script.
    batch_file_name = Unicode(u'batch_script', config=True)
    # The full path to the instantiated batch script.
    batch_file = Unicode(u'')

    def __init__(self, work_dir, parent=None, name=None, config=None):
        super(BatchSystemLauncher, self).__init__(
            work_dir, parent, name, config
        )
        # Resolve the rendered-script path once, inside work_dir.
        self.batch_file = os.path.join(self.work_dir, self.batch_file_name)
        # Template substitution variables; start()/subclasses add entries.
        self.context = {}

    def parse_job_id(self, output):
        """Take the output of the submit command and return the job id.

        NOTE: uses ``re.match`` (anchored at the start of *output*), unlike
        the Windows HPC launcher's ``re.search``.

        Raises
        ------
        LauncherError
            If ``job_id_regexp`` does not match.
        """
        m = re.match(self.job_id_regexp, output)
        if m is not None:
            job_id = m.group()
        else:
            raise LauncherError("Job id couldn't be determined: %s" % output)
        self.job_id = job_id
        log.msg('Job started with job id: %r' % job_id)
        return job_id

    def write_batch_script(self, n):
        """Instantiate and write the batch script to the work_dir."""
        self.context['n'] = n
        script_as_string = Itpl.itplns(self.batch_template, self.context)
        log.msg('Writing instantiated batch script: %s' % self.batch_file)
        f = open(self.batch_file, 'w')
        f.write(script_as_string)
        f.close()

    @inlineCallbacks
    def start(self, n):
        """Start n copies of the process using a batch system.

        Renders the batch script, submits it and fires ``notify_start`` with
        the parsed job id.  The deferred fires with the job id.
        """
        self.write_batch_script(n)
        output = yield getProcessOutput(self.submit_command,
            [self.batch_file], env=os.environ)
        job_id = self.parse_job_id(output)
        self.notify_start(job_id)
        defer.returnValue(job_id)

    @inlineCallbacks
    def stop(self):
        """Delete the batch job and fire ``notify_stop`` with the output."""
        output = yield getProcessOutput(self.delete_command,
            [self.job_id], env=os.environ
        )
        self.notify_stop(output)  # Pass the output of the kill cmd
        defer.returnValue(output)
|
788 | ||||
|
789 | ||||
|
class PBSLauncher(BatchSystemLauncher):
    """A BatchSystemLauncher subclass for PBS."""

    # PBS submission and deletion commands.
    submit_command = Str('qsub', config=True)
    delete_command = Str('qdel', config=True)
    # Job id: leading digits of the submit output (anchored match in
    # BatchSystemLauncher.parse_job_id).
    job_id_regexp = Str(r'\d+', config=True)
    # The batch script template; must be supplied by config or subclass.
    batch_template = Str('', config=True)
    # Filename for the rendered batch script.
    batch_file_name = Unicode(u'pbs_batch_script', config=True)
    # Full path; filled in by BatchSystemLauncher.__init__.
    batch_file = Unicode(u'')
|
799 | ||||
|
800 | ||||
|
class PBSControllerLauncher(PBSLauncher):
    """Launch a controller using PBS."""

    batch_file_name = Unicode(u'pbs_batch_script_controller', config=True)

    def start(self, cluster_dir):
        """Start the controller by profile or cluster_dir."""
        # Here we save profile and cluster_dir in the context so they
        # can be used in the batch script template as ${profile} and
        # ${cluster_dir}
        # NOTE(review): only cluster_dir is actually stored below; the
        # mention of ${profile} looks out of date — confirm against the
        # templates in use.
        self.context['cluster_dir'] = cluster_dir
        self.cluster_dir = unicode(cluster_dir)
        log.msg("Starting PBSControllerLauncher: %r" % self.args)
        return super(PBSControllerLauncher, self).start(1)
|
815 | ||||
|
816 | ||||
|
class PBSEngineSetLauncher(PBSLauncher):
    """Launch a set of engines using PBS."""

    batch_file_name = Unicode(u'pbs_batch_script_engines', config=True)

    def start(self, n, cluster_dir):
        """Start n engines by profile or cluster_dir."""
        # Expose cluster_dir to the batch script template as ${cluster_dir},
        # mirroring PBSControllerLauncher.start.  The previous code extended
        # self.program_args, which the batch-submission path (template ->
        # qsub) never reads, so the engines never received the cluster dir.
        self.context['cluster_dir'] = cluster_dir
        self.cluster_dir = unicode(cluster_dir)
        log.msg('Starting PBSEngineSetLauncher: %r' % self.args)
        return super(PBSEngineSetLauncher, self).start(n)
|
827 | ||||
|
828 | ||||
|
829 | #----------------------------------------------------------------------------- | |||
|
830 | # A launcher for ipcluster itself! | |||
|
831 | #----------------------------------------------------------------------------- | |||
|
832 | ||||
|
833 | ||||
|
def find_ipcluster_cmd():
    """Find the command line ipcluster program in a cross platform way."""
    if sys.platform != 'win32':
        # ipcontroller has to be on the PATH in this case.
        return ['ipcluster']
    # This logic is needed because the ipcluster script doesn't
    # always get installed in the same way or in the same location.
    from IPython.kernel import ipclusterapp
    script_location = ipclusterapp.__file__.replace('.pyc', '.py')
    # The -u option here turns on unbuffered output, which is required
    # on Win32 to prevent wierd conflict and problems with Twisted.
    # Also, use sys.executable to make sure we are picking up the
    # right python exe.
    return [sys.executable, '-u', script_location]
|
850 | ||||
|
851 | ||||
|
class IPClusterLauncher(LocalProcessLauncher):
    """Launch the ipcluster program in an external process."""

    # Base command, resolved once at import time in a cross-platform way.
    ipcluster_cmd = List(find_ipcluster_cmd(), config=True)
    # Command line arguments to pass to ipcluster.
    ipcluster_args = List(
        ['--clean-logs', '--log-to-file', '--log-level', '40'], config=True)
    # Subcommand passed to ipcluster.
    ipcluster_subcommand = Str('start')
    # Number of engines requested via -n.
    ipcluster_n = Int(2)

    def find_args(self):
        """Assemble the full ipcluster command line as a list of strings."""
        return self.ipcluster_cmd + [self.ipcluster_subcommand] + \
            ['-n', repr(self.ipcluster_n)] + self.ipcluster_args

    def start(self):
        """Log the command and launch it via LocalProcessLauncher.start."""
        log.msg("Starting ipcluster: %r" % self.args)
        return super(IPClusterLauncher, self).start()
|
869 |
@@ -0,0 +1,318 b'' | |||||
|
1 | #!/usr/bin/env python | |||
|
2 | # encoding: utf-8 | |||
|
3 | """ | |||
|
4 | Job and task components for writing .xml files that the Windows HPC Server | |||
|
5 | 2008 can use to start jobs. | |||
|
6 | """ | |||
|
7 | ||||
|
8 | #----------------------------------------------------------------------------- | |||
|
9 | # Copyright (C) 2008-2009 The IPython Development Team | |||
|
10 | # | |||
|
11 | # Distributed under the terms of the BSD License. The full license is in | |||
|
12 | # the file COPYING, distributed as part of this software. | |||
|
13 | #----------------------------------------------------------------------------- | |||
|
14 | ||||
|
15 | #----------------------------------------------------------------------------- | |||
|
16 | # Imports | |||
|
17 | #----------------------------------------------------------------------------- | |||
|
18 | ||||
|
19 | from __future__ import with_statement | |||
|
20 | ||||
|
21 | import os | |||
|
22 | import re | |||
|
23 | import uuid | |||
|
24 | ||||
|
25 | from xml.etree import ElementTree as ET | |||
|
26 | from xml.dom import minidom | |||
|
27 | ||||
|
28 | from IPython.core.component import Component | |||
|
29 | from IPython.external import Itpl | |||
|
30 | from IPython.utils.traitlets import ( | |||
|
31 | Str, Int, List, Unicode, Instance, | |||
|
32 | Enum, Bool, CStr | |||
|
33 | ) | |||
|
34 | ||||
|
35 | #----------------------------------------------------------------------------- | |||
|
36 | # Job and Task Component | |||
|
37 | #----------------------------------------------------------------------------- | |||
|
38 | ||||
|
39 | ||||
|
def as_str(value):
    """Coerce *value* to the string form used in the WinHPC job XML.

    Strings pass through unchanged, booleans become 'true'/'false',
    ints and floats are rendered with repr(); any other value is
    returned as-is.
    """
    if isinstance(value, bool):
        return 'true' if value else 'false'
    if isinstance(value, str):
        return value
    if isinstance(value, (int, float)):
        return repr(value)
    return value
|
52 | ||||
|
53 | ||||
|
def indent(elem, level=0):
    """In-place pretty printer for an ElementTree element.

    Recursively adjusts the ``.text`` and ``.tail`` whitespace of *elem*
    and its descendants so serializing the tree puts each node on its own
    line, indented by nesting depth.  Existing non-whitespace text/tails
    are left untouched.
    """
    i = "\n" + level*" "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        # NOTE: the loop variable deliberately shadows ``elem``; after the
        # loop the final ``if`` therefore fixes up the *last child's* tail
        # so the parent's closing tag lands at the parent's indent level.
        for elem in elem:
            indent(elem, level+1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
|
68 | ||||
|
69 | ||||
|
def find_username():
    """Return the current user as ``DOMAIN\\user`` when the USERDOMAIN
    environment variable is set, otherwise just the USERNAME value
    (empty string when USERNAME is unset)."""
    user = os.environ.get('USERNAME', '')
    domain = os.environ.get('USERDOMAIN')
    if domain is None:
        return user
    return '%s\\%s' % (domain, user)
|
77 | ||||
|
78 | ||||
|
class WinHPCJob(Component):
    """A Windows HPC Server 2008 job description.

    Renders itself as the ``<Job>`` element of the scheduler's XML job
    description format.  Tasks are added with :meth:`add_task` and the
    final XML is produced by :meth:`tostring` / :meth:`write`.
    """

    job_id = Str('')
    job_name = Str('MyJob', config=True)
    min_cores = Int(1, config=True)
    max_cores = Int(1, config=True)
    min_sockets = Int(1, config=True)
    max_sockets = Int(1, config=True)
    min_nodes = Int(1, config=True)
    max_nodes = Int(1, config=True)
    unit_type = Str("Core", config=True)
    auto_calculate_min = Bool(True, config=True)
    auto_calculate_max = Bool(True, config=True)
    run_until_canceled = Bool(False, config=True)
    is_exclusive = Bool(False, config=True)
    username = Str(find_username(), config=True)
    job_type = Str('Batch', config=True)
    priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
        default_value='Highest', config=True)
    requested_nodes = Str('', config=True)
    project = Str('IPython', config=True)
    xmlns = Str('http://schemas.microsoft.com/HPCS2008/scheduler/')
    version = Str("2.000")
    tasks = List([])

    @property
    def owner(self):
        # The job's Owner XML attribute mirrors the submitting username.
        return self.username

    def _write_attr(self, root, attr, key):
        """Set XML attribute *key* on *root* from trait *attr*, skipping
        empty/falsy values."""
        s = as_str(getattr(self, attr, ''))
        if s:
            root.set(key, s)

    def as_element(self):
        """Build and return the ``<Job>`` ElementTree element."""
        # We have to add _A_ type things to get the right order than
        # the MSFT XML parser expects.  (The prefixes are stripped again
        # in tostring() below.)
        root = ET.Element('Job')
        self._write_attr(root, 'version', '_A_Version')
        self._write_attr(root, 'job_name', '_B_Name')
        self._write_attr(root, 'unit_type', '_C_UnitType')
        self._write_attr(root, 'min_cores', '_D_MinCores')
        self._write_attr(root, 'max_cores', '_E_MaxCores')
        self._write_attr(root, 'min_sockets', '_F_MinSockets')
        self._write_attr(root, 'max_sockets', '_G_MaxSockets')
        self._write_attr(root, 'min_nodes', '_H_MinNodes')
        self._write_attr(root, 'max_nodes', '_I_MaxNodes')
        self._write_attr(root, 'run_until_canceled', '_J_RunUntilCanceled')
        self._write_attr(root, 'is_exclusive', '_K_IsExclusive')
        self._write_attr(root, 'username', '_L_UserName')
        self._write_attr(root, 'job_type', '_M_JobType')
        self._write_attr(root, 'priority', '_N_Priority')
        self._write_attr(root, 'requested_nodes', '_O_RequestedNodes')
        self._write_attr(root, 'auto_calculate_max', '_P_AutoCalculateMax')
        self._write_attr(root, 'auto_calculate_min', '_Q_AutoCalculateMin')
        self._write_attr(root, 'project', '_R_Project')
        self._write_attr(root, 'owner', '_S_Owner')
        self._write_attr(root, 'xmlns', '_T_xmlns')
        dependencies = ET.SubElement(root, "Dependencies")
        etasks = ET.SubElement(root, "Tasks")
        for t in self.tasks:
            etasks.append(t.as_element())
        return root

    def tostring(self):
        """Return the string representation of the job description XML."""
        root = self.as_element()
        indent(root)
        txt = ET.tostring(root, encoding="utf-8")
        # Now remove the tokens used to order the attributes.
        txt = re.sub(r'_[A-Z]_','',txt)
        txt = '<?xml version="1.0" encoding="utf-8"?>\n' + txt
        return txt

    def write(self, filename):
        """Write the XML job description to a file."""
        txt = self.tostring()
        with open(filename, 'w') as f:
            f.write(txt)

    def add_task(self, task):
        """Add a task to the job.

        Parameters
        ----------
        task : :class:`WinHPCTask`
            The task object to add.
        """
        self.tasks.append(task)
|
168 | ||||
|
169 | ||||
|
class WinHPCTask(Component):
    """A single ``<Task>`` entry in a Windows HPC job description."""

    task_id = Str('')
    task_name = Str('')
    version = Str("2.000")
    min_cores = Int(1, config=True)
    max_cores = Int(1, config=True)
    min_sockets = Int(1, config=True)
    max_sockets = Int(1, config=True)
    min_nodes = Int(1, config=True)
    max_nodes = Int(1, config=True)
    unit_type = Str("Core", config=True)
    command_line = CStr('', config=True)
    work_directory = CStr('', config=True)
    # NOTE(review): trait name is misspelled ('rerunnaable'); kept because
    # it is part of the configurable interface.  The XML key it maps to in
    # as_element() is the correctly spelled 'IsRerunnable'.
    is_rerunnaable = Bool(True, config=True)
    std_out_file_path = CStr('', config=True)
    std_err_file_path = CStr('', config=True)
    is_parametric = Bool(False, config=True)
    environment_variables = Instance(dict, args=(), config=True)

    def _write_attr(self, root, attr, key):
        """Set XML attribute *key* on *root* from trait *attr*, skipping
        empty/falsy values."""
        s = as_str(getattr(self, attr, ''))
        if s:
            root.set(key, s)

    def as_element(self):
        """Build and return the ``<Task>`` ElementTree element.  The _A_..._O_
        prefixes force attribute order and are stripped by WinHPCJob.tostring."""
        root = ET.Element('Task')
        self._write_attr(root, 'version', '_A_Version')
        self._write_attr(root, 'task_name', '_B_Name')
        self._write_attr(root, 'min_cores', '_C_MinCores')
        self._write_attr(root, 'max_cores', '_D_MaxCores')
        self._write_attr(root, 'min_sockets', '_E_MinSockets')
        self._write_attr(root, 'max_sockets', '_F_MaxSockets')
        self._write_attr(root, 'min_nodes', '_G_MinNodes')
        self._write_attr(root, 'max_nodes', '_H_MaxNodes')
        self._write_attr(root, 'command_line', '_I_CommandLine')
        self._write_attr(root, 'work_directory', '_J_WorkDirectory')
        self._write_attr(root, 'is_rerunnaable', '_K_IsRerunnable')
        self._write_attr(root, 'std_out_file_path', '_L_StdOutFilePath')
        self._write_attr(root, 'std_err_file_path', '_M_StdErrFilePath')
        self._write_attr(root, 'is_parametric', '_N_IsParametric')
        self._write_attr(root, 'unit_type', '_O_UnitType')
        root.append(self.get_env_vars())
        return root

    def get_env_vars(self):
        """Return an ``<EnvironmentVariables>`` element with one
        Name/Value pair per entry of ``environment_variables``."""
        env_vars = ET.Element('EnvironmentVariables')
        for k, v in self.environment_variables.items():
            variable = ET.SubElement(env_vars, "Variable")
            name = ET.SubElement(variable, "Name")
            name.text = k
            value = ET.SubElement(variable, "Value")
            value.text = v
        return env_vars
|
224 | ||||
|
225 | ||||
|
226 | ||||
|
227 | # By declaring these, we can configure the controller and engine separately! | |||
|
228 | ||||
|
class IPControllerJob(WinHPCJob):
    """Job description for a single ipcontroller instance; redeclaring the
    traits here lets the controller job be configured separately from the
    engine job."""
    job_name = Str('IPController', config=False)
    is_exclusive = Bool(False, config=True)
    username = Str(find_username(), config=True)
    priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
        default_value='Highest', config=True)
    requested_nodes = Str('', config=True)
    project = Str('IPython', config=True)
|
237 | ||||
|
238 | ||||
|
class IPEngineSetJob(WinHPCJob):
    """Job description for a set of ipengine instances; redeclaring the
    traits here lets the engine job be configured separately from the
    controller job."""
    job_name = Str('IPEngineSet', config=False)
    is_exclusive = Bool(False, config=True)
    username = Str(find_username(), config=True)
    priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
        default_value='Highest', config=True)
    requested_nodes = Str('', config=True)
    project = Str('IPython', config=True)
|
247 | ||||
|
248 | ||||
|
class IPControllerTask(WinHPCTask):
    """Task that runs ipcontroller.exe inside a WinHPC job."""

    task_name = Str('IPController', config=True)
    controller_cmd = List(['ipcontroller.exe'], config=True)
    controller_args = List(['--log-to-file', '--log-level', '40'], config=True)
    # I don't want these to be configurable
    std_out_file_path = CStr('', config=False)
    std_err_file_path = CStr('', config=False)
    min_cores = Int(1, config=False)
    max_cores = Int(1, config=False)
    min_sockets = Int(1, config=False)
    max_sockets = Int(1, config=False)
    min_nodes = Int(1, config=False)
    max_nodes = Int(1, config=False)
    unit_type = Str("Core", config=False)
    work_directory = CStr('', config=False)

    def __init__(self, parent, name=None, config=None):
        super(IPControllerTask, self).__init__(parent, name, config)
        # Unique per-instance log file names so concurrent jobs don't clash.
        the_uuid = uuid.uuid1()
        self.std_out_file_path = os.path.join('log','ipcontroller-%s.out' % the_uuid)
        self.std_err_file_path = os.path.join('log','ipcontroller-%s.err' % the_uuid)

    @property
    def command_line(self):
        # Full command string serialized into the task's CommandLine attribute.
        return ' '.join(self.controller_cmd + self.controller_args)
|
275 | ||||
|
276 | ||||
|
class IPEngineTask(WinHPCTask):
    """Task that runs ipengine.exe inside a WinHPC job."""

    task_name = Str('IPEngine', config=True)
    engine_cmd = List(['ipengine.exe'], config=True)
    engine_args = List(['--log-to-file', '--log-level', '40'], config=True)
    # I don't want these to be configurable
    std_out_file_path = CStr('', config=False)
    std_err_file_path = CStr('', config=False)
    min_cores = Int(1, config=False)
    max_cores = Int(1, config=False)
    min_sockets = Int(1, config=False)
    max_sockets = Int(1, config=False)
    min_nodes = Int(1, config=False)
    max_nodes = Int(1, config=False)
    unit_type = Str("Core", config=False)
    work_directory = CStr('', config=False)

    def __init__(self, parent, name=None, config=None):
        super(IPEngineTask,self).__init__(parent, name, config)
        # Unique per-instance log file names so concurrent jobs don't clash.
        the_uuid = uuid.uuid1()
        self.std_out_file_path = os.path.join('log','ipengine-%s.out' % the_uuid)
        self.std_err_file_path = os.path.join('log','ipengine-%s.err' % the_uuid)

    @property
    def command_line(self):
        # Full command string serialized into the task's CommandLine attribute.
        return ' '.join(self.engine_cmd + self.engine_args)
|
303 | ||||
|
304 | ||||
|
305 | # j = WinHPCJob(None) | |||
|
306 | # j.job_name = 'IPCluster' | |||
|
307 | # j.username = 'GNET\\bgranger' | |||
|
308 | # j.requested_nodes = 'GREEN' | |||
|
309 | # | |||
|
310 | # t = WinHPCTask(None) | |||
|
311 | # t.task_name = 'Controller' | |||
|
312 | # t.command_line = r"\\blue\domainusers$\bgranger\Python\Python25\Scripts\ipcontroller.exe --log-to-file -p default --log-level 10" | |||
|
313 | # t.work_directory = r"\\blue\domainusers$\bgranger\.ipython\cluster_default" | |||
|
314 | # t.std_out_file_path = 'controller-out.txt' | |||
|
315 | # t.std_err_file_path = 'controller-err.txt' | |||
|
316 | # t.environment_variables['PYTHONPATH'] = r"\\blue\domainusers$\bgranger\Python\Python25\Lib\site-packages" | |||
|
317 | # j.add_task(t) | |||
|
318 |
@@ -0,0 +1,17 b'' | |||||
|
# This shows how to use the new top-level embed function. It is a simpler
# API that manages the creation of the embedded shell.

from IPython import embed

# Some local state for the embedded shell to inspect.
a = 10
b = 20

# Drop into an interactive IPython shell here.  'First time' is presumably
# a banner/header string — confirm against IPython.embed's signature.
embed('First time')

c = 30
d = 40

try:
    raise Exception('adsfasdf')
except:
    # Embedding inside an except block lets you poke at the live state
    # that produced the exception.
    embed('The second time')
@@ -0,0 +1,90 b'' | |||||
|
1 | from numpy import * | |||
|
2 | ||||
|
def mandel(n, m, itermax, xmin, xmax, ymin, ymax):
    '''
    Fast mandelbrot computation using numpy.

    (n, m) are the output image dimensions
    itermax is the maximum number of iterations to do
    xmin, xmax, ymin, ymax specify the region of the
    set to compute.

    Returns an (n, m) int array: 0 for points that never escaped within
    itermax iterations, otherwise the 1-based iteration count at which
    |z| first exceeded 2.
    '''
    # ix and iy are 2D arrays giving the x- and y-index of every pixel;
    # they are kept in lockstep with c below so that escaped points can be
    # written back into the correct cell of img.
    ix, iy = mgrid[0:n, 0:m]
    # x and y are the coordinate values at each pixel, obtained by fancy
    # indexing linspace arrays with the index grids.
    x = linspace(xmin, xmax, n)[ix]
    y = linspace(ymin, ymax, m)[iy]
    # c is the complex number with the given x, y coords
    c = x+complex(0,1)*y
    del x, y # save a bit of memory, we only need z
    # Output image, coloured by escape iteration (0 = never escaped).
    img = zeros(c.shape, dtype=int)
    # Flatten ix, iy and c so finished points can be removed from all
    # three in lockstep; reshaping costs no extra memory and preserves the
    # pairing between each c value and its pixel coordinates.
    ix.shape = n*m
    iy.shape = n*m
    c.shape = n*m
    # We iterate z -> z^2 + c with z starting at 0; the first iteration
    # would give z = c, so start there.  Copy because z is mutated in place.
    z = copy(c)
    for i in range(itermax):
        if not len(z): break # all points have escaped
        # In-place z = z*z + c: quicker and uses less memory.
        multiply(z, z, z)
        add(z, c, z)
        # Points escaping on this iteration get coloured with i+1 so that
        # never-escaped points keep the distinct value 0.
        rem = abs(z)>2.0
        img[ix[rem], iy[rem]] = i+1
        # Invert the mask to select the points still iterating.
        # FIX: this was `rem = -rem`; unary minus on a boolean array is an
        # error in modern NumPy — `~` is the element-wise NOT.  (`xrange`
        # was also replaced by `range`, which behaves identically here.)
        rem = ~rem
        z = z[rem]
        ix, iy = ix[rem], iy[rem]
        c = c[rem]
    return img
|
80 | ||||
|
if __name__=='__main__':
    from pylab import *
    import time
    start = time.time()
    # 400x400 image, up to 100 iterations, over the classic viewing window.
    I = mandel(400, 400, 100, -2, .5, -1.25, 1.25)
    print 'Time taken:', time.time()-start
    # Recolour the interior (never-escaped points, value 0) above the
    # maximum iteration count so it renders distinctly.
    I[I==0] = 101
    # Transpose so axis 0 (x) is horizontal in the plot.
    img = imshow(I.T, origin='lower left')
    img.write_png('mandel.png', noscale=True)
    show()
90 | show() |
@@ -0,0 +1,54 b'' | |||||
|
1 | """Calculate statistics on the digits of pi in parallel. | |||
|
2 | ||||
|
3 | This program uses the functions in :file:`pidigits.py` to calculate | |||
|
4 | the frequencies of 2 digit sequences in the digits of pi. The | |||
|
5 | results are plotted using matplotlib. | |||
|
6 | ||||
|
7 | To run, text files from http://www.super-computing.org/ | |||
|
8 | must be installed in the working directory of the IPython engines. | |||
|
9 | The actual filenames to be used can be set with the ``filestring`` | |||
|
10 | variable below. | |||
|
11 | ||||
|
12 | The dataset we have been using for this is the 200 million digit one here: | |||
|
13 | ftp://pi.super-computing.org/.2/pi200m/ | |||
|
14 | """ | |||
|
15 | ||||
|
16 | from IPython.kernel import client | |||
|
17 | from matplotlib import pyplot as plt | |||
|
18 | import numpy as np | |||
|
19 | from pidigits import * | |||
|
20 | from timeit import default_timer as clock | |||
|
21 | ||||
|
22 | ||||
|
23 | # Files with digits of pi (10m digits each) | |||
|
24 | filestring = 'pi200m-ascii-%(i)02dof20.txt' | |||
|
25 | files = [filestring % {'i':i} for i in range(1,16)] | |||
|
26 | ||||
|
27 | ||||
|
28 | # Connect to the IPython cluster | |||
|
29 | mec = client.MultiEngineClient(profile='mycluster') | |||
|
30 | mec.run('pidigits.py') | |||
|
31 | ||||
|
32 | ||||
|
33 | # Run 10m digits on 1 engine | |||
|
34 | mapper = mec.mapper(targets=0) | |||
|
35 | t1 = clock() | |||
|
36 | freqs10m = mapper.map(compute_two_digit_freqs, files[:1])[0] | |||
|
37 | t2 = clock() | |||
|
38 | digits_per_second1 = 10.0e6/(t2-t1) | |||
|
39 | print "Digits per second (1 core, 10m digits): ", digits_per_second1 | |||
|
40 | ||||
|
41 | ||||
|
42 | # Run 150m digits on 15 engines (8 cores) | |||
|
43 | t1 = clock() | |||
|
44 | freqs_all = mec.map(compute_two_digit_freqs, files[:len(mec)]) | |||
|
45 | freqs150m = reduce_freqs(freqs_all) | |||
|
46 | t2 = clock() | |||
|
47 | digits_per_second8 = 150.0e6/(t2-t1) | |||
|
48 | print "Digits per second (8 cores, 150m digits): ", digits_per_second8 | |||
|
49 | ||||
|
50 | print "Speedup: ", digits_per_second8/digits_per_second1 | |||
|
51 | ||||
|
52 | plot_two_digit_freqs(freqs150m) | |||
|
53 | plt.title("2 digit sequences in 150m digits of pi") | |||
|
54 |
@@ -0,0 +1,144 b'' | |||||
|
1 | """Compute statistics on the digits of pi. | |||
|
2 | ||||
|
3 | This uses precomputed digits of pi from the website | |||
|
4 | of Professor Yasumasa Kanada at the University of | |||
|
5 | Tokyo: http://www.super-computing.org/ | |||
|
6 | ||||
|
7 | Currently, there are only functions to read the | |||
|
8 | .txt (non-compressed, non-binary) files, but adding | |||
|
9 | support for compression and binary files would be | |||
|
10 | straightforward. | |||
|
11 | ||||
|
12 | This focuses on computing the number of times that | |||
|
13 | all 1, 2, n digits sequences occur in the digits of pi. | |||
|
14 | If the digits of pi are truly random, these frequencies | |||
|
15 | should be equal. | |||
|
16 | """ | |||
|
17 | ||||
|
18 | # Import statements | |||
|
19 | ||||
|
20 | from __future__ import division, with_statement | |||
|
21 | import numpy as np | |||
|
22 | from matplotlib import pyplot as plt | |||
|
23 | ||||
|
24 | # Top-level functions | |||
|
25 | ||||
|
26 | def compute_one_digit_freqs(filename): | |||
|
27 | """ | |||
|
28 | Read digits of pi from a file and compute the 1 digit frequencies. | |||
|
29 | """ | |||
|
30 | d = txt_file_to_digits(filename) | |||
|
31 | freqs = one_digit_freqs(d) | |||
|
32 | return freqs | |||
|
33 | ||||
|
34 | def compute_two_digit_freqs(filename): | |||
|
35 | """ | |||
|
36 | Read digits of pi from a file and compute the 2 digit frequencies. | |||
|
37 | """ | |||
|
38 | d = txt_file_to_digits(filename) | |||
|
39 | freqs = two_digit_freqs(d) | |||
|
40 | return freqs | |||
|
41 | ||||
|
42 | def reduce_freqs(freqlist): | |||
|
43 | """ | |||
|
44 | Add up a list of freq counts to get the total counts. | |||
|
45 | """ | |||
|
46 | allfreqs = np.zeros_like(freqlist[0]) | |||
|
47 | for f in freqlist: | |||
|
48 | allfreqs += f | |||
|
49 | return allfreqs | |||
|
50 | ||||
|
51 | def compute_n_digit_freqs(filename, n): | |||
|
52 | """ | |||
|
53 | Read digits of pi from a file and compute the n digit frequencies. | |||
|
54 | """ | |||
|
55 | d = txt_file_to_digits(filename) | |||
|
56 | freqs = n_digit_freqs(d, n) | |||
|
57 | return freqs | |||
|
58 | ||||
|
59 | # Read digits from a txt file | |||
|
60 | ||||
|
61 | def txt_file_to_digits(filename, the_type=str): | |||
|
62 | """ | |||
|
63 | Yield the digits of pi read from a .txt file. | |||
|
64 | """ | |||
|
65 | with open(filename, 'r') as f: | |||
|
66 | for line in f.readlines(): | |||
|
67 | for c in line: | |||
|
68 | if c != '\n' and c!= ' ': | |||
|
69 | yield the_type(c) | |||
|
70 | ||||
|
71 | # Actual counting functions | |||
|
72 | ||||
|
73 | def one_digit_freqs(digits, normalize=False): | |||
|
74 | """ | |||
|
75 | Consume digits of pi and compute 1 digit freq. counts. | |||
|
76 | """ | |||
|
77 | freqs = np.zeros(10, dtype='i4') | |||
|
78 | for d in digits: | |||
|
79 | freqs[int(d)] += 1 | |||
|
80 | if normalize: | |||
|
81 | freqs = freqs/freqs.sum() | |||
|
82 | return freqs | |||
|
83 | ||||
|
84 | def two_digit_freqs(digits, normalize=False): | |||
|
85 | """ | |||
|
86 | Consume digits of pi and compute 2 digits freq. counts. | |||
|
87 | """ | |||
|
88 | freqs = np.zeros(100, dtype='i4') | |||
|
89 | last = digits.next() | |||
|
90 | this = digits.next() | |||
|
91 | for d in digits: | |||
|
92 | index = int(last + this) | |||
|
93 | freqs[index] += 1 | |||
|
94 | last = this | |||
|
95 | this = d | |||
|
96 | if normalize: | |||
|
97 | freqs = freqs/freqs.sum() | |||
|
98 | return freqs | |||
|
99 | ||||
|
100 | def n_digit_freqs(digits, n, normalize=False): | |||
|
101 | """ | |||
|
102 | Consume digits of pi and compute n digits freq. counts. | |||
|
103 | ||||
|
104 | This should only be used for 1-6 digits. | |||
|
105 | """ | |||
|
106 | freqs = np.zeros(pow(10,n), dtype='i4') | |||
|
107 | current = np.zeros(n, dtype=int) | |||
|
108 | for i in range(n): | |||
|
109 | current[i] = digits.next() | |||
|
110 | for d in digits: | |||
|
111 | index = int(''.join(map(str, current))) | |||
|
112 | freqs[index] += 1 | |||
|
113 | current[0:-1] = current[1:] | |||
|
114 | current[-1] = d | |||
|
115 | if normalize: | |||
|
116 | freqs = freqs/freqs.sum() | |||
|
117 | return freqs | |||
|
118 | ||||
|
119 | # Plotting functions | |||
|
120 | ||||
|
121 | def plot_two_digit_freqs(f2): | |||
|
122 | """ | |||
|
123 | Plot two digits frequency counts using matplotlib. | |||
|
124 | """ | |||
|
125 | f2_copy = f2.copy() | |||
|
126 | f2_copy.shape = (10,10) | |||
|
127 | ax = plt.matshow(f2_copy) | |||
|
128 | plt.colorbar() | |||
|
129 | for i in range(10): | |||
|
130 | for j in range(10): | |||
|
131 | plt.text(i-0.2, j+0.2, str(j)+str(i)) | |||
|
132 | plt.ylabel('First digit') | |||
|
133 | plt.xlabel('Second digit') | |||
|
134 | return ax | |||
|
135 | ||||
|
136 | def plot_one_digit_freqs(f1): | |||
|
137 | """ | |||
|
138 | Plot one digit frequency counts using matplotlib. | |||
|
139 | """ | |||
|
140 | ax = plt.plot(f1,'bo-') | |||
|
141 | plt.title('Single digit counts in pi') | |||
|
142 | plt.xlabel('Digit') | |||
|
143 | plt.ylabel('Count') | |||
|
144 | return ax |
@@ -0,0 +1,59 b'' | |||||
|
1 | ==================================================== | |||
|
2 | Notes on code execution in :class:`InteractiveShell` | |||
|
3 | ==================================================== | |||
|
4 | ||||
|
5 | Overview | |||
|
6 | ======== | |||
|
7 | ||||
|
8 | This section contains information and notes about the code execution | |||
|
9 | system in :class:`InteractiveShell`. This system needs to be refactored | |||
|
10 | and we are keeping notes about this process here. | |||
|
11 | ||||
|
12 | Current design | |||
|
13 | ============== | |||
|
14 | ||||
|
15 | Here is a script that shows the relationships between the various | |||
|
16 | methods in :class:`InteractiveShell` that manage code execution:: | |||
|
17 | ||||
|
18 | import networkx as nx | |||
|
19 | import matplotlib.pyplot as plt | |||
|
20 | ||||
|
21 | exec_init_cmd = 'exec_init_cmd' | |||
|
22 | interact = 'interact' | |||
|
23 | runlines = 'runlines' | |||
|
24 | runsource = 'runsource' | |||
|
25 | runcode = 'runcode' | |||
|
26 | push_line = 'push_line' | |||
|
27 | mainloop = 'mainloop' | |||
|
28 | embed_mainloop = 'embed_mainloop' | |||
|
29 | ri = 'raw_input' | |||
|
30 | prefilter = 'prefilter' | |||
|
31 | ||||
|
32 | g = nx.DiGraph() | |||
|
33 | ||||
|
34 | g.add_node(exec_init_cmd) | |||
|
35 | g.add_node(interact) | |||
|
36 | g.add_node(runlines) | |||
|
37 | g.add_node(runsource) | |||
|
38 | g.add_node(push_line) | |||
|
39 | g.add_node(mainloop) | |||
|
40 | g.add_node(embed_mainloop) | |||
|
41 | g.add_node(ri) | |||
|
42 | g.add_node(prefilter) | |||
|
43 | ||||
|
44 | g.add_edge(exec_init_cmd, push_line) | |||
|
45 | g.add_edge(exec_init_cmd, prefilter) | |||
|
46 | g.add_edge(mainloop, exec_init_cmd) | |||
|
47 | g.add_edge(mainloop, interact) | |||
|
48 | g.add_edge(embed_mainloop, interact) | |||
|
49 | g.add_edge(interact, ri) | |||
|
50 | g.add_edge(interact, push_line) | |||
|
51 | g.add_edge(push_line, runsource) | |||
|
52 | g.add_edge(runlines, push_line) | |||
|
53 | g.add_edge(runlines, prefilter) | |||
|
54 | g.add_edge(runsource, runcode) | |||
|
55 | g.add_edge(ri, prefilter) | |||
|
56 | ||||
|
57 | nx.draw_spectral(g, node_size=100, alpha=0.6, node_color='r', | |||
|
58 | font_size=10, node_shape='o') | |||
|
59 | plt.show() |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
@@ -0,0 +1,282 b'' | |||||
|
1 | ================= | |||
|
2 | Parallel examples | |||
|
3 | ================= | |||
|
4 | ||||
|
5 | In this section we describe two more involved examples of using an IPython | |||
|
6 | cluster to perform a parallel computation. In these examples, we will be using | |||
|
7 | IPython's "pylab" mode, which enables interactive plotting using the | |||
|
8 | Matplotlib package. IPython can be started in this mode by typing:: | |||
|
9 | ||||
|
10 | ipython -p pylab | |||
|
11 | ||||
|
12 | at the system command line. If this prints an error message, you will | |||
|
13 | need to install the default profiles from within IPython by doing, | |||
|
14 | ||||
|
15 | .. sourcecode:: ipython | |||
|
16 | ||||
|
17 | In [1]: %install_profiles | |||
|
18 | ||||
|
19 | and then restarting IPython. | |||
|
20 | ||||
|
21 | 150 million digits of pi | |||
|
22 | ======================== | |||
|
23 | ||||
|
24 | In this example we would like to study the distribution of digits in the | |||
|
25 | number pi (in base 10). While it is not known if pi is a normal number (a | |||
|
26 | number is normal in base 10 if 0-9 occur with equal likelihood) numerical | |||
|
27 | investigations suggest that it is. We will begin with a serial calculation on | |||
|
28 | 10,000 digits of pi and then perform a parallel calculation involving 150 | |||
|
29 | million digits. | |||
|
30 | ||||
|
31 | In both the serial and parallel calculation we will be using functions defined | |||
|
32 | in the :file:`pidigits.py` file, which is available in the | |||
|
33 | :file:`docs/examples/kernel` directory of the IPython source distribution. | |||
|
34 | These functions provide basic facilities for working with the digits of pi and | |||
|
35 | can be loaded into IPython by putting :file:`pidigits.py` in your current | |||
|
36 | working directory and then doing: | |||
|
37 | ||||
|
38 | .. sourcecode:: ipython | |||
|
39 | ||||
|
40 | In [1]: run pidigits.py | |||
|
41 | ||||
|
42 | Serial calculation | |||
|
43 | ------------------ | |||
|
44 | ||||
|
45 | For the serial calculation, we will use SymPy (http://www.sympy.org) to | |||
|
46 | calculate 10,000 digits of pi and then look at the frequencies of the digits | |||
|
47 | 0-9. Out of 10,000 digits, we expect each digit to occur 1,000 times. While | |||
|
48 | SymPy is capable of calculating many more digits of pi, our purpose here is to | |||
|
49 | set the stage for the much larger parallel calculation. | |||
|
50 | ||||
|
51 | In this example, we use two functions from :file:`pidigits.py`: | |||
|
52 | :func:`one_digit_freqs` (which calculates how many times each digit occurs) | |||
|
53 | and :func:`plot_one_digit_freqs` (which uses Matplotlib to plot the result). | |||
|
54 | Here is an interactive IPython session that uses these functions with | |||
|
55 | SymPy: | |||
|
56 | ||||
|
57 | .. sourcecode:: ipython | |||
|
58 | ||||
|
59 | In [7]: import sympy | |||
|
60 | ||||
|
61 | In [8]: pi = sympy.pi.evalf(40) | |||
|
62 | ||||
|
63 | In [9]: pi | |||
|
64 | Out[9]: 3.141592653589793238462643383279502884197 | |||
|
65 | ||||
|
66 | In [10]: pi = sympy.pi.evalf(10000) | |||
|
67 | ||||
|
68 | In [11]: digits = (d for d in str(pi)[2:]) # create a sequence of digits | |||
|
69 | ||||
|
70 | In [12]: run pidigits.py # load one_digit_freqs/plot_one_digit_freqs | |||
|
71 | ||||
|
72 | In [13]: freqs = one_digit_freqs(digits) | |||
|
73 | ||||
|
74 | In [14]: plot_one_digit_freqs(freqs) | |||
|
75 | Out[14]: [<matplotlib.lines.Line2D object at 0x18a55290>] | |||
|
76 | ||||
|
77 | The resulting plot of the single digit counts shows that each digit occurs | |||
|
78 | approximately 1,000 times, but that with only 10,000 digits the | |||
|
79 | statistical fluctuations are still rather large: | |||
|
80 | ||||
|
81 | .. image:: single_digits.* | |||
|
82 | ||||
|
83 | It is clear that to reduce the relative fluctuations in the counts, we need | |||
|
84 | to look at many more digits of pi. That brings us to the parallel calculation. | |||
|
85 | ||||
|
86 | Parallel calculation | |||
|
87 | -------------------- | |||
|
88 | ||||
|
89 | Calculating many digits of pi is a challenging computational problem in itself. | |||
|
90 | Because we want to focus on the distribution of digits in this example, we | |||
|
91 | will use pre-computed digits of pi from the website of Professor Yasumasa | |||
|
92 | Kanada at the University of Tokyo (http://www.super-computing.org). These | |||
|
93 | digits come in a set of text files (ftp://pi.super-computing.org/.2/pi200m/) | |||
|
94 | that each have 10 million digits of pi. | |||
|
95 | ||||
|
96 | For the parallel calculation, we have copied these files to the local hard | |||
|
97 | drives of the compute nodes. A total of 15 of these files will be used, for a | |||
|
98 | total of 150 million digits of pi. To make things a little more interesting we | |||
|
99 | will calculate the frequencies of all 2 digits sequences (00-99) and then plot | |||
|
100 | the result using a 2D matrix in Matplotlib. | |||
|
101 | ||||
|
102 | The overall idea of the calculation is simple: each IPython engine will | |||
|
103 | compute the two digit counts for the digits in a single file. Then in a final | |||
|
104 | step the counts from each engine will be added up. To perform this | |||
|
105 | calculation, we will need two top-level functions from :file:`pidigits.py`: | |||
|
106 | ||||
|
107 | .. literalinclude:: ../../examples/kernel/pidigits.py | |||
|
108 | :language: python | |||
|
109 | :lines: 34-49 | |||
|
110 | ||||
|
111 | We will also use the :func:`plot_two_digit_freqs` function to plot the | |||
|
112 | results. The code to run this calculation in parallel is contained in | |||
|
113 | :file:`docs/examples/kernel/parallelpi.py`. This code can be run in parallel | |||
|
114 | using IPython by following these steps: | |||
|
115 | ||||
|
116 | 1. Copy the text files with the digits of pi | |||
|
117 | (ftp://pi.super-computing.org/.2/pi200m/) to the working directory of the | |||
|
118 | engines on the compute nodes. | |||
|
119 | 2. Use :command:`ipcluster` to start 15 engines. We used an 8 core (2 quad | |||
|
120 | core CPUs) cluster with hyperthreading enabled which makes the 8 cores | |||
|
121 | look like 16 (1 controller + 15 engines) in the OS. However, the maximum | |||
|
122 | speedup we can observe is still only 8x. | |||
|
123 | 3. With the file :file:`parallelpi.py` in your current working directory, open | |||
|
124 | up IPython in pylab mode and type ``run parallelpi.py``. | |||
|
125 | ||||
|
126 | When run on our 8 core cluster, we observe a speedup of 7.7x. This is slightly | |||
|
127 | less than linear scaling (8x) because the controller is also running on one of | |||
|
128 | the cores. | |||
|
129 | ||||
|
130 | To emphasize the interactive nature of IPython, we now show how the | |||
|
131 | calculation can also be run by simply typing the commands from | |||
|
132 | :file:`parallelpi.py` interactively into IPython: | |||
|
133 | ||||
|
134 | .. sourcecode:: ipython | |||
|
135 | ||||
|
136 | In [1]: from IPython.kernel import client | |||
|
137 | 2009-11-19 11:32:38-0800 [-] Log opened. | |||
|
138 | ||||
|
139 | # The MultiEngineClient allows us to use the engines interactively. | |||
|
140 | # We simply pass MultiEngineClient the name of the cluster profile we | |||
|
141 | # are using. | |||
|
142 | In [2]: mec = client.MultiEngineClient(profile='mycluster') | |||
|
143 | 2009-11-19 11:32:44-0800 [-] Connecting [0] | |||
|
144 | 2009-11-19 11:32:44-0800 [Negotiation,client] Connected: ./ipcontroller-mec.furl | |||
|
145 | ||||
|
146 | In [3]: mec.get_ids() | |||
|
147 | Out[3]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] | |||
|
148 | ||||
|
149 | In [4]: run pidigits.py | |||
|
150 | ||||
|
151 | In [5]: filestring = 'pi200m-ascii-%(i)02dof20.txt' | |||
|
152 | ||||
|
153 | # Create the list of files to process. | |||
|
154 | In [6]: files = [filestring % {'i':i} for i in range(1,16)] | |||
|
155 | ||||
|
156 | In [7]: files | |||
|
157 | Out[7]: | |||
|
158 | ['pi200m-ascii-01of20.txt', | |||
|
159 | 'pi200m-ascii-02of20.txt', | |||
|
160 | 'pi200m-ascii-03of20.txt', | |||
|
161 | 'pi200m-ascii-04of20.txt', | |||
|
162 | 'pi200m-ascii-05of20.txt', | |||
|
163 | 'pi200m-ascii-06of20.txt', | |||
|
164 | 'pi200m-ascii-07of20.txt', | |||
|
165 | 'pi200m-ascii-08of20.txt', | |||
|
166 | 'pi200m-ascii-09of20.txt', | |||
|
167 | 'pi200m-ascii-10of20.txt', | |||
|
168 | 'pi200m-ascii-11of20.txt', | |||
|
169 | 'pi200m-ascii-12of20.txt', | |||
|
170 | 'pi200m-ascii-13of20.txt', | |||
|
171 | 'pi200m-ascii-14of20.txt', | |||
|
172 | 'pi200m-ascii-15of20.txt'] | |||
|
173 | ||||
|
174 | # This is the parallel calculation using the MultiEngineClient.map method | |||
|
175 | # which applies compute_two_digit_freqs to each file in files in parallel. | |||
|
176 | In [8]: freqs_all = mec.map(compute_two_digit_freqs, files) | |||
|
177 | ||||
|
178 | # Add up the frequencies from each engine. | |||
|
179 | In [8]: freqs = reduce_freqs(freqs_all) | |||
|
180 | ||||
|
181 | In [9]: plot_two_digit_freqs(freqs) | |||
|
182 | Out[9]: <matplotlib.image.AxesImage object at 0x18beb110> | |||
|
183 | ||||
|
184 | In [10]: plt.title('2 digit counts of 150m digits of pi') | |||
|
185 | Out[10]: <matplotlib.text.Text object at 0x18d1f9b0> | |||
|
186 | ||||
|
187 | The resulting plot generated by Matplotlib is shown below. The colors indicate | |||
|
188 | which two digit sequences are more (red) or less (blue) likely to occur in the | |||
|
189 | first 150 million digits of pi. We clearly see that the sequence "41" is | |||
|
190 | most likely and that "06" and "07" are least likely. Further analysis would | |||
|
191 | show that the relative size of the statistical fluctuations have decreased | |||
|
192 | compared to the 10,000 digit calculation. | |||
|
193 | ||||
|
194 | .. image:: two_digit_counts.* | |||
|
195 | ||||
|
196 | ||||
|
197 | Parallel options pricing | |||
|
198 | ======================== | |||
|
199 | ||||
|
200 | An option is a financial contract that gives the buyer of the contract the | |||
|
201 | right to buy (a "call") or sell (a "put") a secondary asset (a stock for | |||
|
202 | example) at a particular date in the future (the expiration date) for a | |||
|
203 | pre-agreed upon price (the strike price). For this right, the buyer pays the | |||
|
204 | seller a premium (the option price). There are a wide variety of flavors of | |||
|
205 | options (American, European, Asian, etc.) that are useful for different | |||
|
206 | purposes: hedging against risk, speculation, etc. | |||
|
207 | ||||
|
208 | Much of modern finance is driven by the need to price these contracts | |||
|
209 | accurately based on what is known about the properties (such as volatility) of | |||
|
210 | the underlying asset. One method of pricing options is to use a Monte Carlo | |||
|
211 | simulation of the underlying asset price. In this example we use this approach | |||
|
212 | to price both European and Asian (path dependent) options for various strike | |||
|
213 | prices and volatilities. | |||
|
214 | ||||
|
215 | The code for this example can be found in the :file:`docs/examples/kernel` | |||
|
216 | directory of the IPython source. The function :func:`price_options` in | |||
|
217 | :file:`mcpricer.py` implements the basic Monte Carlo pricing algorithm using | |||
|
218 | the NumPy package and is shown here: | |||
|
219 | ||||
|
220 | .. literalinclude:: ../../examples/kernel/mcpricer.py | |||
|
221 | :language: python | |||
|
222 | ||||
|
223 | To run this code in parallel, we will use IPython's :class:`TaskClient` class, | |||
|
224 | which distributes work to the engines using dynamic load balancing. This | |||
|
225 | client can be used alongside the :class:`MultiEngineClient` class shown in | |||
|
226 | the previous example. The parallel calculation using :class:`TaskClient` can | |||
|
227 | be found in the file :file:`mcpricer.py`. The code in this file creates a | |||
|
228 | :class:`TaskClient` instance and then submits a set of tasks using | |||
|
229 | :meth:`TaskClient.run` that calculate the option prices for different | |||
|
230 | volatilities and strike prices. The results are then plotted as a 2D contour | |||
|
231 | plot using Matplotlib. | |||
|
232 | ||||
|
233 | .. literalinclude:: ../../examples/kernel/mcdriver.py | |||
|
234 | :language: python | |||
|
235 | ||||
|
236 | To use this code, start an IPython cluster using :command:`ipcluster`, open | |||
|
237 | IPython in the pylab mode with the file :file:`mcdriver.py` in your current | |||
|
238 | working directory and then type: | |||
|
239 | ||||
|
240 | .. sourcecode:: ipython | |||
|
241 | ||||
|
242 | In [7]: run mcdriver.py | |||
|
243 | Submitted tasks: [0, 1, 2, ...] | |||
|
244 | ||||
|
245 | Once all the tasks have finished, the results can be plotted using the | |||
|
246 | :func:`plot_options` function. Here we make contour plots of the Asian | |||
|
247 | call and Asian put options as function of the volatility and strike price: | |||
|
248 | ||||
|
249 | .. sourcecode:: ipython | |||
|
250 | ||||
|
251 | In [8]: plot_options(sigma_vals, K_vals, prices['acall']) | |||
|
252 | ||||
|
253 | In [9]: plt.figure() | |||
|
254 | Out[9]: <matplotlib.figure.Figure object at 0x18c178d0> | |||
|
255 | ||||
|
256 | In [10]: plot_options(sigma_vals, K_vals, prices['aput']) | |||
|
257 | ||||
|
258 | These results are shown in the two figures below. On a 8 core cluster the | |||
|
259 | entire calculation (10 strike prices, 10 volatilities, 100,000 paths for each) | |||
|
260 | took 30 seconds in parallel, giving a speedup of 7.7x, which is comparable | |||
|
261 | to the speedup observed in our previous example. | |||
|
262 | ||||
|
263 | .. image:: asian_call.* | |||
|
264 | ||||
|
265 | .. image:: asian_put.* | |||
|
266 | ||||
|
267 | Conclusion | |||
|
268 | ========== | |||
|
269 | ||||
|
270 | To conclude these examples, we summarize the key features of IPython's | |||
|
271 | parallel architecture that have been demonstrated: | |||
|
272 | ||||
|
273 | * Serial code can be parallelized often with only a few extra lines of code. | |||
|
274 | We have used the :class:`MultiEngineClient` and :class:`TaskClient` classes | |||
|
275 | for this purpose. | |||
|
276 | * The resulting parallel code can be run without ever leaving the IPython's | |||
|
277 | interactive shell. | |||
|
278 | * Any data computed in parallel can be explored interactively through | |||
|
279 | visualization or further numerical calculations. | |||
|
280 | * We have run these examples on a cluster running Windows HPC Server 2008. | |||
|
281 | IPython's built in support for the Windows HPC job scheduler makes it | |||
|
282 | easy to get started with IPython's parallel capabilities. |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
@@ -0,0 +1,333 b'' | |||||
|
1 | ============================================ | |||
|
2 | Getting started with Windows HPC Server 2008 | |||
|
3 | ============================================ | |||
|
4 | ||||
|
5 | Introduction | |||
|
6 | ============ | |||
|
7 | ||||
|
8 | The Python programming language is an increasingly popular language for | |||
|
9 | numerical computing. This is due to a unique combination of factors. First, | |||
|
10 | Python is a high-level and *interactive* language that is well matched to | |||
|
11 | interactive numerical work. Second, it is easy (often times trivial) to | |||
|
12 | integrate legacy C/C++/Fortran code into Python. Third, a large number of | |||
|
13 | high-quality open source projects provide all the needed building blocks for | |||
|
14 | numerical computing: numerical arrays (NumPy), algorithms (SciPy), 2D/3D | |||
|
15 | Visualization (Matplotlib, Mayavi, Chaco), Symbolic Mathematics (Sage, Sympy) | |||
|
16 | and others. | |||
|
17 | ||||
|
18 | The IPython project is a core part of this open-source toolchain and is | |||
|
19 | focused on creating a comprehensive environment for interactive and | |||
|
20 | exploratory computing in the Python programming language. It enables all of | |||
|
21 | the above tools to be used interactively and consists of two main components: | |||
|
22 | ||||
|
23 | * An enhanced interactive Python shell with support for interactive plotting | |||
|
24 | and visualization. | |||
|
25 | * An architecture for interactive parallel computing. | |||
|
26 | ||||
|
27 | With these components, it is possible to perform all aspects of a parallel | |||
|
28 | computation interactively. This type of workflow is particularly relevant in | |||
|
29 | scientific and numerical computing where algorithms, code and data are | |||
|
30 | continually evolving as the user/developer explores a problem. The broad | |||
|
31 | trends in computing (commodity clusters, multicore, cloud computing, etc.) | |||
|
32 | make these capabilities of IPython particularly relevant. | |||
|
33 | ||||
|
34 | While IPython is a cross platform tool, it has particularly strong support for | |||
|
35 | Windows based compute clusters running Windows HPC Server 2008. This document | |||
|
36 | describes how to get started with IPython on Windows HPC Server 2008. The | |||
|
37 | content and emphasis here is practical: installing IPython, configuring | |||
|
38 | IPython to use the Windows job scheduler and running example parallel programs | |||
|
39 | interactively. A more complete description of IPython's parallel computing | |||
|
40 | capabilities can be found in IPython's online documentation | |||
|
41 | (http://ipython.scipy.org/moin/Documentation). | |||
|
42 | ||||
|
43 | Setting up your Windows cluster | |||
|
44 | =============================== | |||
|
45 | ||||
|
46 | This document assumes that you already have a cluster running Windows | |||
|
47 | HPC Server 2008. Here is a broad overview of what is involved with setting up | |||
|
48 | such a cluster: | |||
|
49 | ||||
|
50 | 1. Install Windows Server 2008 on the head and compute nodes in the cluster. | |||
|
51 | 2. Setup the network configuration on each host. Each host should have a | |||
|
52 | static IP address. | |||
|
53 | 3. On the head node, activate the "Active Directory Domain Services" role | |||
|
54 | and make the head node the domain controller. | |||
|
55 | 4. Join the compute nodes to the newly created Active Directory (AD) domain. | |||
|
56 | 5. Setup user accounts in the domain with shared home directories. | |||
|
57 | 6. Install the HPC Pack 2008 on the head node to create a cluster. | |||
|
58 | 7. Install the HPC Pack 2008 on the compute nodes. | |||
|
59 | ||||
|
60 | More details about installing and configuring Windows HPC Server 2008 can be | |||
|
61 | found on the Windows HPC Home Page (http://www.microsoft.com/hpc). Regardless | |||
|
62 | of what steps you follow to set up your cluster, the remainder of this | |||
|
63 | document will assume that: | |||
|
64 | ||||
|
65 | * There are domain users that can log on to the AD domain and submit jobs | |||
|
66 | to the cluster scheduler. | |||
|
67 | * These domain users have shared home directories. While shared home | |||
|
68 | directories are not required to use IPython, they make it much easier to | |||
|
69 | use IPython. | |||
|
70 | ||||
|
71 | Installation of IPython and its dependencies | |||
|
72 | ============================================ | |||
|
73 | ||||
|
74 | IPython and all of its dependencies are freely available and open source. | |||
|
75 | These packages provide a powerful and cost-effective approach to numerical and | |||
|
76 | scientific computing on Windows. The following dependencies are needed to run | |||
|
77 | IPython on Windows: | |||
|
78 | ||||
|
79 | * Python 2.5 or 2.6 (http://www.python.org) | |||
|
80 | * pywin32 (http://sourceforge.net/projects/pywin32/) | |||
|
81 | * PyReadline (https://launchpad.net/pyreadline) | |||
|
82 | * zope.interface and Twisted (http://twistedmatrix.com) | |||
|
83 | * Foolscap (http://foolscap.lothar.com/trac) | |||
|
84 | * pyOpenSSL (https://launchpad.net/pyopenssl) | |||
|
85 | * IPython (http://ipython.scipy.org) | |||
|
86 | ||||
|
87 | In addition, the following dependencies are needed to run the demos described | |||
|
88 | in this document. | |||
|
89 | ||||
|
90 | * NumPy and SciPy (http://www.scipy.org) | |||
|
91 | * wxPython (http://www.wxpython.org) | |||
|
92 | * Matplotlib (http://matplotlib.sourceforge.net/) | |||
|
93 | ||||
|
94 | The easiest way of obtaining these dependencies is through the Enthought | |||
|
95 | Python Distribution (EPD) (http://www.enthought.com/products/epd.php). EPD is | |||
|
96 | produced by Enthought, Inc. and contains all of these packages and others in a | |||
|
97 | single installer and is available free for academic users. While it is also | |||
|
98 | possible to download and install each package individually, this is a tedious | |||
|
99 | process. Thus, we highly recommend using EPD to install these packages on | |||
|
100 | Windows. | |||
|
101 | ||||
|
102 | Regardless of how you install the dependencies, here are the steps you will | |||
|
103 | need to follow: | |||
|
104 | ||||
|
105 | 1. Install all of the packages listed above, either individually or using EPD | |||
|
106 | on the head node, compute nodes and user workstations. | |||
|
107 | ||||
|
108 | 2. Make sure that :file:`C:\\Python25` and :file:`C:\\Python25\\Scripts` are | |||
|
109 | in the system :envvar:`%PATH%` variable on each node. | |||
|
110 | ||||
|
111 | 3. Install the latest development version of IPython. This can be done by | |||
|
112 | downloading the development version from the IPython website | |||
|
113 | (http://ipython.scipy.org) and following the installation instructions. | |||
|
114 | ||||
|
115 | Further details about installing IPython or its dependencies can be found in | |||
|
116 | the online IPython documentation (http://ipython.scipy.org/moin/Documentation). | |||
|
117 | Once you are finished with the installation, you can try IPython out by | |||
|
118 | opening a Windows Command Prompt and typing ``ipython``. This will | |||
|
119 | start IPython's interactive shell and you should see something like the | |||
|
120 | following screenshot: | |||
|
121 | ||||
|
122 | .. image:: ipython_shell.* | |||
|
123 | ||||
|
124 | Starting an IPython cluster | |||
|
125 | =========================== | |||
|
126 | ||||
|
127 | To use IPython's parallel computing capabilities, you will need to start an | |||
|
128 | IPython cluster. An IPython cluster consists of one controller and multiple | |||
|
129 | engines: | |||
|
130 | ||||
|
131 | IPython controller | |||
|
132 | The IPython controller manages the engines and acts as a gateway between | |||
|
133 | the engines and the client, which runs in the user's interactive IPython | |||
|
134 | session. The controller is started using the :command:`ipcontroller` | |||
|
135 | command. | |||
|
136 | ||||
|
137 | IPython engine | |||
|
138 | IPython engines run a user's Python code in parallel on the compute nodes. | |||
|
139 | Engines are started using the :command:`ipengine` command. | |||
|
140 | ||||
|
141 | Once these processes are started, a user can run Python code interactively and | |||
|
142 | in parallel on the engines from within the IPython shell using an appropriate | |||
|
143 | client. This includes the ability to interact with, plot and visualize data | |||
|
144 | from the engines. | |||
|
145 | ||||
|
146 | IPython has a command line program called :command:`ipcluster` that automates | |||
|
147 | all aspects of starting the controller and engines on the compute nodes. | |||
|
148 | :command:`ipcluster` has full support for the Windows HPC job scheduler, | |||
|
149 | meaning that :command:`ipcluster` can use this job scheduler to start the | |||
|
150 | controller and engines. In our experience, the Windows HPC job scheduler is | |||
|
151 | particularly well suited for interactive applications, such as IPython. Once | |||
|
152 | :command:`ipcluster` is configured properly, a user can start an IPython | |||
|
153 | cluster from their local workstation almost instantly, without having to log | |||
|
154 | on to the head node (as is typically required by Unix based job schedulers). | |||
|
155 | This enables a user to move seamlessly between serial and parallel | |||
|
156 | computations. | |||
|
157 | ||||
|
158 | In this section we show how to use :command:`ipcluster` to start an IPython | |||
|
159 | cluster using the Windows HPC Server 2008 job scheduler. To make sure that | |||
|
160 | :command:`ipcluster` is installed and working properly, you should first try | |||
|
161 | to start an IPython cluster on your local host. To do this, open a Windows | |||
|
162 | Command Prompt and type the following command:: | |||
|
163 | ||||
|
164 | ipcluster start -n 2 | |||
|
165 | ||||
|
166 | You should see a number of messages printed to the screen, ending with | |||
|
167 | "IPython cluster: started". The result should look something like the following | |||
|
168 | screenshot: | |||
|
169 | ||||
|
170 | .. image:: ipcluster_start.* | |||
|
171 | ||||
|
172 | At this point, the controller and two engines are running on your local host. | |||
|
173 | This configuration is useful for testing and for situations where you want to | |||
|
174 | take advantage of multiple cores on your local computer. | |||
|
175 | ||||
|
176 | Now that we have confirmed that :command:`ipcluster` is working properly, we | |||
|
177 | describe how to configure and run an IPython cluster on an actual compute | |||
|
178 | cluster running Windows HPC Server 2008. Here is an outline of the needed | |||
|
179 | steps: | |||
|
180 | ||||
|
181 | 1. Create a cluster profile using: ``ipcluster create -p mycluster`` | |||
|
182 | ||||
|
183 | 2. Edit configuration files in the directory :file:`.ipython\\cluster_mycluster` | |||
|
184 | ||||
|
185 | 3. Start the cluster using: ``ipcluster start -p mycluster -n 32`` | |||
|
186 | ||||
|
187 | Creating a cluster profile | |||
|
188 | -------------------------- | |||
|
189 | ||||
|
190 | In most cases, you will have to create a cluster profile to use IPython on a | |||
|
191 | cluster. A cluster profile is a name (like "mycluster") that is associated | |||
|
192 | with a particular cluster configuration. The profile name is used by | |||
|
193 | :command:`ipcluster` when working with the cluster. | |||
|
194 | ||||
|
195 | Associated with each cluster profile is a cluster directory. This cluster | |||
|
196 | directory is a specially named directory (typically located in the | |||
|
197 | :file:`.ipython` subdirectory of your home directory) that contains the | |||
|
198 | configuration files for a particular cluster profile, as well as log files and | |||
|
199 | security keys. The naming convention for cluster directories is: | |||
|
200 | :file:`cluster_<profile name>`. Thus, the cluster directory for a profile named | |||
|
201 | "foo" would be :file:`.ipython\\cluster_foo`. | |||
|
202 | ||||
|
203 | To create a new cluster profile (named "mycluster") and the associated cluster | |||
|
204 | directory, type the following command at the Windows Command Prompt:: | |||
|
205 | ||||
|
206 | ipcluster create -p mycluster | |||
|
207 | ||||
|
208 | The output of this command is shown in the screenshot below. Notice how | |||
|
209 | :command:`ipcluster` prints out the location of the newly created cluster | |||
|
210 | directory. | |||
|
211 | ||||
|
212 | .. image:: ipcluster_create.* | |||
|
213 | ||||
|
214 | Configuring a cluster profile | |||
|
215 | ----------------------------- | |||
|
216 | ||||
|
217 | Next, you will need to configure the newly created cluster profile by editing | |||
|
218 | the following configuration files in the cluster directory: | |||
|
219 | ||||
|
220 | * :file:`ipcluster_config.py` | |||
|
221 | * :file:`ipcontroller_config.py` | |||
|
222 | * :file:`ipengine_config.py` | |||
|
223 | ||||
|
224 | When :command:`ipcluster` is run, these configuration files are used to | |||
|
225 | determine how the engines and controller will be started. In most cases, | |||
|
226 | you will only have to set a few of the attributes in these files. | |||
|
227 | ||||
|
228 | To configure :command:`ipcluster` to use the Windows HPC job scheduler, you | |||
|
229 | will need to edit the following attributes in the file | |||
|
230 | :file:`ipcluster_config.py`:: | |||
|
231 | ||||
|
232 | # Set these at the top of the file to tell ipcluster to use the | |||
|
233 | # Windows HPC job scheduler. | |||
|
234 | c.Global.controller_launcher = \ | |||
|
235 | 'IPython.kernel.launcher.WindowsHPCControllerLauncher' | |||
|
236 | c.Global.engine_launcher = \ | |||
|
237 | 'IPython.kernel.launcher.WindowsHPCEngineSetLauncher' | |||
|
238 | ||||
|
239 | # Set these to the host name of the scheduler (head node) of your cluster. | |||
|
240 | c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE' | |||
|
241 | c.WindowsHPCEngineSetLauncher.scheduler = 'HEADNODE' | |||
|
242 | ||||
|
243 | There are a number of other configuration attributes that can be set, but | |||
|
244 | in most cases these will be sufficient to get you started. | |||
|
245 | ||||
|
246 | .. warning:: | |||
|
247 | If any of your configuration attributes involve specifying the location | |||
|
248 | of shared directories or files, you must make sure that you use UNC paths | |||
|
249 | like :file:`\\\\host\\share`. It is also important that you specify | |||
|
250 | these paths using raw Python strings: ``r'\\host\share'`` to make sure | |||
|
251 | that the backslashes are properly escaped. | |||
|
252 | ||||
|
253 | Starting the cluster profile | |||
|
254 | ---------------------------- | |||
|
255 | ||||
|
256 | Once a cluster profile has been configured, starting an IPython cluster using | |||
|
257 | the profile is simple:: | |||
|
258 | ||||
|
259 | ipcluster start -p mycluster -n 32 | |||
|
260 | ||||
|
261 | The ``-n`` option tells :command:`ipcluster` how many engines to start (in | |||
|
262 | this case 32). Stopping the cluster is as simple as typing Control-C. | |||
|
263 | ||||
|
264 | Using the HPC Job Manager | |||
|
265 | ------------------------- | |||
|
266 | ||||
|
267 | When ``ipcluster start`` is run the first time, :command:`ipcluster` creates | |||
|
268 | two XML job description files in the cluster directory: | |||
|
269 | ||||
|
270 | * :file:`ipcontroller_job.xml` | |||
|
271 | * :file:`ipengineset_job.xml` | |||
|
272 | ||||
|
273 | Once these files have been created, they can be imported into the HPC Job | |||
|
274 | Manager application. Then, the controller and engines for that profile can be | |||
|
275 | started using the HPC Job Manager directly, without using :command:`ipcluster`. | |||
|
276 | However, anytime the cluster profile is re-configured, ``ipcluster start`` | |||
|
277 | must be run again to regenerate the XML job description files. The | |||
|
278 | following screenshot shows what the HPC Job Manager interface looks like | |||
|
279 | with a running IPython cluster. | |||
|
280 | ||||
|
281 | .. image:: hpc_job_manager.* | |||
|
282 | ||||
|
283 | Performing a simple interactive parallel computation | |||
|
284 | ==================================================== | |||
|
285 | ||||
|
286 | Once you have started your IPython cluster, you can start to use it. To do | |||
|
287 | this, open up a new Windows Command Prompt and start up IPython's interactive | |||
|
288 | shell by typing:: | |||
|
289 | ||||
|
290 | ipython | |||
|
291 | ||||
|
292 | Then you can create a :class:`MultiEngineClient` instance for your profile and | |||
|
293 | use the resulting instance to do a simple interactive parallel computation. In | |||
|
294 | the code and screenshot that follows, we take a simple Python function and | |||
|
295 | apply it to each element of an array of integers in parallel using the | |||
|
296 | :meth:`MultiEngineClient.map` method: | |||
|
297 | ||||
|
298 | .. sourcecode:: ipython | |||
|
299 | ||||
|
300 | In [1]: from IPython.kernel.client import * | |||
|
301 | ||||
|
302 | In [2]: mec = MultiEngineClient(profile='mycluster') | |||
|
303 | ||||
|
304 | In [3]: mec.get_ids() | |||
|
305 | Out[3]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] | |||
|
306 | ||||
|
307 | In [4]: def f(x): | |||
|
308 | ...: return x**10 | |||
|
309 | ||||
|
310 | In [5]: mec.map(f, range(15)) # f is applied in parallel | |||
|
311 | Out[5]: | |||
|
312 | [0, | |||
|
313 | 1, | |||
|
314 | 1024, | |||
|
315 | 59049, | |||
|
316 | 1048576, | |||
|
317 | 9765625, | |||
|
318 | 60466176, | |||
|
319 | 282475249, | |||
|
320 | 1073741824, | |||
|
321 | 3486784401L, | |||
|
322 | 10000000000L, | |||
|
323 | 25937424601L, | |||
|
324 | 61917364224L, | |||
|
325 | 137858491849L, | |||
|
326 | 289254654976L] | |||
|
327 | ||||
|
328 | The :meth:`map` method has the same signature as Python's builtin :func:`map` | |||
|
329 | function, but runs the calculation in parallel. More involved examples of using | |||
|
330 | :class:`MultiEngineClient` are provided in the examples that follow. | |||
|
331 | ||||
|
332 | .. image:: mec_simple.* | |||
|
333 |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
1 | NO CONTENT: new file 100644, binary diff hidden |
|
NO CONTENT: new file 100644, binary diff hidden |
@@ -0,0 +1,14 b'' | |||||
|
1 | ======================================== | |||
|
2 | Using IPython on Windows HPC Server 2008 | |||
|
3 | ======================================== | |||
|
4 | ||||
|
5 | ||||
|
6 | Contents | |||
|
7 | ======== | |||
|
8 | ||||
|
9 | .. toctree:: | |||
|
10 | :maxdepth: 1 | |||
|
11 | ||||
|
12 | parallel_winhpc.txt | |||
|
13 | parallel_demos.txt | |||
|
14 |
@@ -13,7 +13,7 b' c = get_config()' | |||||
13 |
|
13 | |||
14 | # Set this to determine the detail of what is logged at startup. |
|
14 | # Set this to determine the detail of what is logged at startup. | |
15 | # The default is 30 and possible values are 0,10,20,30,40,50. |
|
15 | # The default is 30 and possible values are 0,10,20,30,40,50. | |
16 | c.Global.log_level = 20 |
|
16 | # c.Global.log_level = 20 | |
17 |
|
17 | |||
18 | # This should be a list of importable Python modules that have a |
|
18 | # This should be a list of importable Python modules that have a | |
19 | # load_in_ipython(ip) method. This method gets called when the extension |
|
19 | # load_in_ipython(ip) method. This method gets called when the extension | |
@@ -35,7 +35,7 b' c.Global.log_level = 20' | |||||
35 | # These files are run in IPython in the user's namespace. Files with a .py |
|
35 | # These files are run in IPython in the user's namespace. Files with a .py | |
36 | # extension need to be pure Python. Files with a .ipy extension can have |
|
36 | # extension need to be pure Python. Files with a .ipy extension can have | |
37 | # custom IPython syntax (like magics, etc.). |
|
37 | # custom IPython syntax (like magics, etc.). | |
38 | # These files need to be in the cwd, the ipythondir or be absolute paths. |
|
38 | # These files need to be in the cwd, the ipython_dir or be absolute paths. | |
39 | # c.Global.exec_files = [ |
|
39 | # c.Global.exec_files = [ | |
40 | # 'mycode.py', |
|
40 | # 'mycode.py', | |
41 | # 'fancy.ipy' |
|
41 | # 'fancy.ipy' | |
@@ -71,9 +71,9 b' c.Global.log_level = 20' | |||||
71 |
|
71 | |||
72 | # c.InteractiveShell.logstart = True |
|
72 | # c.InteractiveShell.logstart = True | |
73 |
|
73 | |||
74 | # c.InteractiveShell.logfile = 'ipython_log.py' |
|
74 | # c.InteractiveShell.logfile = u'ipython_log.py' | |
75 |
|
75 | |||
76 | # c.InteractiveShell.logappend = 'mylog.py' |
|
76 | # c.InteractiveShell.logappend = u'mylog.py' | |
77 |
|
77 | |||
78 | # c.InteractiveShell.object_info_string_level = 0 |
|
78 | # c.InteractiveShell.object_info_string_level = 0 | |
79 |
|
79 |
@@ -244,8 +244,14 b' class PyFileConfigLoader(FileConfigLoader):' | |||||
244 | # with the parents. |
|
244 | # with the parents. | |
245 | def load_subconfig(fname): |
|
245 | def load_subconfig(fname): | |
246 | loader = PyFileConfigLoader(fname, self.path) |
|
246 | loader = PyFileConfigLoader(fname, self.path) | |
247 | sub_config = loader.load_config() |
|
247 | try: | |
248 | self.config._merge(sub_config) |
|
248 | sub_config = loader.load_config() | |
|
249 | except IOError: | |||
|
250 | # Pass silently if the sub config is not there. This happens | |||
|
251 | # when a user is using a profile, but not the default config. | |||
|
252 | pass | |||
|
253 | else: | |||
|
254 | self.config._merge(sub_config) | |||
249 |
|
255 | |||
250 | # Again, this needs to be a closure and should be used in config |
|
256 | # Again, this needs to be a closure and should be used in config | |
251 | # files to get the config being loaded. |
|
257 | # files to get the config being loaded. | |
@@ -271,6 +277,7 b' class CommandLineConfigLoader(ConfigLoader):' | |||||
271 | class NoConfigDefault(object): pass |
|
277 | class NoConfigDefault(object): pass | |
272 | NoConfigDefault = NoConfigDefault() |
|
278 | NoConfigDefault = NoConfigDefault() | |
273 |
|
279 | |||
|
280 | ||||
274 | class ArgParseConfigLoader(CommandLineConfigLoader): |
|
281 | class ArgParseConfigLoader(CommandLineConfigLoader): | |
275 |
|
282 | |||
276 | # arguments = [(('-f','--file'),dict(type=str,dest='file'))] |
|
283 | # arguments = [(('-f','--file'),dict(type=str,dest='file'))] |
1 | NO CONTENT: file renamed from IPython/config/profile/__init_.py to IPython/config/profile/__init__.py |
|
NO CONTENT: file renamed from IPython/config/profile/__init_.py to IPython/config/profile/__init__.py |
@@ -1,7 +1,13 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | """ |
|
3 | """ | |
4 | An application for IPython |
|
4 | An application for IPython. | |
|
5 | ||||
|
6 | All top-level applications should use the classes in this module for | |||
|
7 | handling configuration and creating components. | |||
|
8 | ||||
|
9 | The job of an :class:`Application` is to create the master configuration | |||
|
10 | object and then create the components, passing the config to them. | |||
5 |
|
11 | |||
6 | Authors: |
|
12 | Authors: | |
7 |
|
13 | |||
@@ -26,10 +32,9 b' Notes' | |||||
26 | import logging |
|
32 | import logging | |
27 | import os |
|
33 | import os | |
28 | import sys |
|
34 | import sys | |
29 | import traceback |
|
|||
30 | from copy import deepcopy |
|
|||
31 |
|
35 | |||
32 | from IPython.utils.genutils import get_ipython_dir, filefind |
|
36 | from IPython.core import release | |
|
37 | from IPython.utils.genutils import get_ipython_dir | |||
33 | from IPython.config.loader import ( |
|
38 | from IPython.config.loader import ( | |
34 | PyFileConfigLoader, |
|
39 | PyFileConfigLoader, | |
35 | ArgParseConfigLoader, |
|
40 | ArgParseConfigLoader, | |
@@ -42,22 +47,27 b' from IPython.config.loader import (' | |||||
42 | #----------------------------------------------------------------------------- |
|
47 | #----------------------------------------------------------------------------- | |
43 |
|
48 | |||
44 |
|
49 | |||
45 |
class |
|
50 | class BaseAppArgParseConfigLoader(ArgParseConfigLoader): | |
46 | """Default command line options for IPython based applications.""" |
|
51 | """Default command line options for IPython based applications.""" | |
47 |
|
52 | |||
48 | def _add_other_arguments(self): |
|
53 | def _add_other_arguments(self): | |
49 |
self.parser.add_argument('-ipythondir', |
|
54 | self.parser.add_argument('--ipython-dir', | |
50 | help='Set to override default location of Global.ipythondir.', |
|
55 | dest='Global.ipython_dir',type=unicode, | |
|
56 | help='Set to override default location of Global.ipython_dir.', | |||
51 | default=NoConfigDefault, |
|
57 | default=NoConfigDefault, | |
52 | metavar='Global.ipythondir') |
|
58 | metavar='Global.ipython_dir') | |
53 |
self.parser.add_argument('-p','-profile', |
|
59 | self.parser.add_argument('-p', '--profile', | |
|
60 | dest='Global.profile',type=unicode, | |||
54 | help='The string name of the ipython profile to be used.', |
|
61 | help='The string name of the ipython profile to be used.', | |
55 | default=NoConfigDefault, |
|
62 | default=NoConfigDefault, | |
56 | metavar='Global.profile') |
|
63 | metavar='Global.profile') | |
57 |
self.parser.add_argument('- |
|
64 | self.parser.add_argument('--log-level', | |
|
65 | dest="Global.log_level",type=int, | |||
58 | help='Set the log level (0,10,20,30,40,50). Default is 30.', |
|
66 | help='Set the log level (0,10,20,30,40,50). Default is 30.', | |
59 |
default=NoConfigDefault |
|
67 | default=NoConfigDefault, | |
60 | self.parser.add_argument('-config_file',dest='Global.config_file',type=str, |
|
68 | metavar='Global.log_level') | |
|
69 | self.parser.add_argument('--config-file', | |||
|
70 | dest='Global.config_file',type=unicode, | |||
61 | help='Set the config file name to override default.', |
|
71 | help='Set the config file name to override default.', | |
62 | default=NoConfigDefault, |
|
72 | default=NoConfigDefault, | |
63 | metavar='Global.config_file') |
|
73 | metavar='Global.config_file') | |
@@ -68,20 +78,24 b' class ApplicationError(Exception):' | |||||
68 |
|
78 | |||
69 |
|
79 | |||
70 | class Application(object): |
|
80 | class Application(object): | |
71 |
"""Load a config, construct |
|
81 | """Load a config, construct components and set them running.""" | |
72 | """ |
|
|||
73 |
|
82 | |||
74 |
|
|
83 | name = u'ipython' | |
75 | name = 'ipython' |
|
84 | description = 'IPython: an enhanced interactive Python shell.' | |
|
85 | config_file_name = u'ipython_config.py' | |||
|
86 | default_log_level = logging.WARN | |||
76 |
|
87 | |||
77 | def __init__(self): |
|
88 | def __init__(self): | |
|
89 | self._exiting = False | |||
78 | self.init_logger() |
|
90 | self.init_logger() | |
|
91 | # Track the default and actual separately because some messages are | |||
|
92 | # only printed if we aren't using the default. | |||
79 | self.default_config_file_name = self.config_file_name |
|
93 | self.default_config_file_name = self.config_file_name | |
80 |
|
94 | |||
81 | def init_logger(self): |
|
95 | def init_logger(self): | |
82 | self.log = logging.getLogger(self.__class__.__name__) |
|
96 | self.log = logging.getLogger(self.__class__.__name__) | |
83 | # This is used as the default until the command line arguments are read. |
|
97 | # This is used as the default until the command line arguments are read. | |
84 |
self.log.setLevel( |
|
98 | self.log.setLevel(self.default_log_level) | |
85 | self._log_handler = logging.StreamHandler() |
|
99 | self._log_handler = logging.StreamHandler() | |
86 | self._log_formatter = logging.Formatter("[%(name)s] %(message)s") |
|
100 | self._log_formatter = logging.Formatter("[%(name)s] %(message)s") | |
87 | self._log_handler.setFormatter(self._log_formatter) |
|
101 | self._log_handler.setFormatter(self._log_formatter) | |
@@ -98,16 +112,24 b' class Application(object):' | |||||
98 | def start(self): |
|
112 | def start(self): | |
99 | """Start the application.""" |
|
113 | """Start the application.""" | |
100 | self.attempt(self.create_default_config) |
|
114 | self.attempt(self.create_default_config) | |
|
115 | self.log_default_config() | |||
|
116 | self.set_default_config_log_level() | |||
101 | self.attempt(self.pre_load_command_line_config) |
|
117 | self.attempt(self.pre_load_command_line_config) | |
102 | self.attempt(self.load_command_line_config, action='abort') |
|
118 | self.attempt(self.load_command_line_config, action='abort') | |
|
119 | self.set_command_line_config_log_level() | |||
103 | self.attempt(self.post_load_command_line_config) |
|
120 | self.attempt(self.post_load_command_line_config) | |
104 | self.attempt(self.find_ipythondir) |
|
121 | self.log_command_line_config() | |
|
122 | self.attempt(self.find_ipython_dir) | |||
|
123 | self.attempt(self.find_resources) | |||
105 | self.attempt(self.find_config_file_name) |
|
124 | self.attempt(self.find_config_file_name) | |
106 | self.attempt(self.find_config_file_paths) |
|
125 | self.attempt(self.find_config_file_paths) | |
107 | self.attempt(self.pre_load_file_config) |
|
126 | self.attempt(self.pre_load_file_config) | |
108 | self.attempt(self.load_file_config) |
|
127 | self.attempt(self.load_file_config) | |
|
128 | self.set_file_config_log_level() | |||
109 | self.attempt(self.post_load_file_config) |
|
129 | self.attempt(self.post_load_file_config) | |
|
130 | self.log_file_config() | |||
110 | self.attempt(self.merge_configs) |
|
131 | self.attempt(self.merge_configs) | |
|
132 | self.log_master_config() | |||
111 | self.attempt(self.pre_construct) |
|
133 | self.attempt(self.pre_construct) | |
112 | self.attempt(self.construct) |
|
134 | self.attempt(self.construct) | |
113 | self.attempt(self.post_construct) |
|
135 | self.attempt(self.post_construct) | |
@@ -127,65 +149,89 b' class Application(object):' | |||||
127 | don't belong to a particular component. |
|
149 | don't belong to a particular component. | |
128 | """ |
|
150 | """ | |
129 | self.default_config = Config() |
|
151 | self.default_config = Config() | |
130 | self.default_config.Global.ipythondir = get_ipython_dir() |
|
152 | self.default_config.Global.ipython_dir = get_ipython_dir() | |
|
153 | self.default_config.Global.log_level = self.log_level | |||
|
154 | ||||
|
155 | def log_default_config(self): | |||
131 | self.log.debug('Default config loaded:') |
|
156 | self.log.debug('Default config loaded:') | |
132 | self.log.debug(repr(self.default_config)) |
|
157 | self.log.debug(repr(self.default_config)) | |
133 |
|
158 | |||
|
159 | def set_default_config_log_level(self): | |||
|
160 | try: | |||
|
161 | self.log_level = self.default_config.Global.log_level | |||
|
162 | except AttributeError: | |||
|
163 | # Fallback to the default_log_level class attribute | |||
|
164 | pass | |||
|
165 | ||||
134 | def create_command_line_config(self): |
|
166 | def create_command_line_config(self): | |
135 | """Create and return a command line config loader.""" |
|
167 | """Create and return a command line config loader.""" | |
136 |
return |
|
168 | return BaseAppArgParseConfigLoader( | |
|
169 | description=self.description, | |||
|
170 | version=release.version | |||
|
171 | ) | |||
137 |
|
172 | |||
138 | def pre_load_command_line_config(self): |
|
173 | def pre_load_command_line_config(self): | |
139 | """Do actions just before loading the command line config.""" |
|
174 | """Do actions just before loading the command line config.""" | |
140 | pass |
|
175 | pass | |
141 |
|
176 | |||
142 | def load_command_line_config(self): |
|
177 | def load_command_line_config(self): | |
143 | """Load the command line config. |
|
178 | """Load the command line config.""" | |
144 |
|
||||
145 | This method also sets ``self.debug``. |
|
|||
146 | """ |
|
|||
147 |
|
||||
148 | loader = self.create_command_line_config() |
|
179 | loader = self.create_command_line_config() | |
149 | self.command_line_config = loader.load_config() |
|
180 | self.command_line_config = loader.load_config() | |
150 | self.extra_args = loader.get_extra_args() |
|
181 | self.extra_args = loader.get_extra_args() | |
151 |
|
182 | |||
|
183 | def set_command_line_config_log_level(self): | |||
152 | try: |
|
184 | try: | |
153 | self.log_level = self.command_line_config.Global.log_level |
|
185 | self.log_level = self.command_line_config.Global.log_level | |
154 | except AttributeError: |
|
186 | except AttributeError: | |
155 | pass # Use existing value which is set in Application.init_logger. |
|
187 | pass | |
156 | self.log.debug("Command line config loaded:") |
|
|||
157 | self.log.debug(repr(self.command_line_config)) |
|
|||
158 |
|
188 | |||
159 | def post_load_command_line_config(self): |
|
189 | def post_load_command_line_config(self): | |
160 | """Do actions just after loading the command line config.""" |
|
190 | """Do actions just after loading the command line config.""" | |
161 | pass |
|
191 | pass | |
162 |
|
192 | |||
163 |
def |
|
193 | def log_command_line_config(self): | |
|
194 | self.log.debug("Command line config loaded:") | |||
|
195 | self.log.debug(repr(self.command_line_config)) | |||
|
196 | ||||
|
197 | def find_ipython_dir(self): | |||
164 | """Set the IPython directory. |
|
198 | """Set the IPython directory. | |
165 |
|
199 | |||
166 | This sets ``self.ipythondir``, but the actual value that is passed |
|
200 | This sets ``self.ipython_dir``, but the actual value that is passed | |
167 | to the application is kept in either ``self.default_config`` or |
|
201 | to the application is kept in either ``self.default_config`` or | |
168 |
``self.command_line_config``. This also add |
|
202 | ``self.command_line_config``. This also adds ``self.ipython_dir`` to | |
169 | ``sys.path`` so config files there can be references by other config |
|
203 | ``sys.path`` so config files there can be references by other config | |
170 | files. |
|
204 | files. | |
171 | """ |
|
205 | """ | |
172 |
|
206 | |||
173 | try: |
|
207 | try: | |
174 | self.ipythondir = self.command_line_config.Global.ipythondir |
|
208 | self.ipython_dir = self.command_line_config.Global.ipython_dir | |
175 | except AttributeError: |
|
209 | except AttributeError: | |
176 | self.ipythondir = self.default_config.Global.ipythondir |
|
210 | self.ipython_dir = self.default_config.Global.ipython_dir | |
177 | sys.path.append(os.path.abspath(self.ipythondir)) |
|
211 | sys.path.append(os.path.abspath(self.ipython_dir)) | |
178 | if not os.path.isdir(self.ipythondir): |
|
212 | if not os.path.isdir(self.ipython_dir): | |
179 |
os.makedirs(self.ipythondir, mode |
|
213 | os.makedirs(self.ipython_dir, mode=0777) | |
180 | self.log.debug("IPYTHONDIR set to: %s" % self.ipythondir) |
|
214 | self.log.debug("IPYTHON_DIR set to: %s" % self.ipython_dir) | |
|
215 | ||||
|
216 | def find_resources(self): | |||
|
217 | """Find other resources that need to be in place. | |||
|
218 | ||||
|
219 | Things like cluster directories need to be in place to find the | |||
|
220 | config file. These happen right after the IPython directory has | |||
|
221 | been set. | |||
|
222 | """ | |||
|
223 | pass | |||
181 |
|
224 | |||
182 | def find_config_file_name(self): |
|
225 | def find_config_file_name(self): | |
183 | """Find the config file name for this application. |
|
226 | """Find the config file name for this application. | |
184 |
|
227 | |||
|
228 | This must set ``self.config_file_name`` to the filename of the | |||
|
229 | config file to use (just the filename). The search paths for the | |||
|
230 | config file are set in :meth:`find_config_file_paths` and then passed | |||
|
231 | to the config file loader where they are resolved to an absolute path. | |||
|
232 | ||||
185 | If a profile has been set at the command line, this will resolve |
|
233 | If a profile has been set at the command line, this will resolve | |
186 | it. The search paths for the config file are set in |
|
234 | it. | |
187 | :meth:`find_config_file_paths` and then passed to the config file |
|
|||
188 | loader where they are resolved to an absolute path. |
|
|||
189 | """ |
|
235 | """ | |
190 |
|
236 | |||
191 | try: |
|
237 | try: | |
@@ -196,14 +242,18 b' class Application(object):' | |||||
196 | try: |
|
242 | try: | |
197 | self.profile_name = self.command_line_config.Global.profile |
|
243 | self.profile_name = self.command_line_config.Global.profile | |
198 | name_parts = self.config_file_name.split('.') |
|
244 | name_parts = self.config_file_name.split('.') | |
199 | name_parts.insert(1, '_' + self.profile_name + '.') |
|
245 | name_parts.insert(1, u'_' + self.profile_name + u'.') | |
200 | self.config_file_name = ''.join(name_parts) |
|
246 | self.config_file_name = ''.join(name_parts) | |
201 | except AttributeError: |
|
247 | except AttributeError: | |
202 | pass |
|
248 | pass | |
203 |
|
249 | |||
204 | def find_config_file_paths(self): |
|
250 | def find_config_file_paths(self): | |
205 |
"""Set the search paths for resolving the config file. |
|
251 | """Set the search paths for resolving the config file. | |
206 | self.config_file_paths = (os.getcwd(), self.ipythondir) |
|
252 | ||
|
253 | This must set ``self.config_file_paths`` to a sequence of search | |||
|
254 | paths to pass to the config file loader. | |||
|
255 | """ | |||
|
256 | self.config_file_paths = (os.getcwd(), self.ipython_dir) | |||
207 |
|
257 | |||
208 | def pre_load_file_config(self): |
|
258 | def pre_load_file_config(self): | |
209 | """Do actions before the config file is loaded.""" |
|
259 | """Do actions before the config file is loaded.""" | |
@@ -216,7 +266,7 b' class Application(object):' | |||||
216 | ``CONFIG_FILE`` config variable is set to the resolved config file |
|
266 | ``CONFIG_FILE`` config variable is set to the resolved config file | |
217 | location. If not successful, an empty config is used. |
|
267 | location. If not successful, an empty config is used. | |
218 | """ |
|
268 | """ | |
219 |
self.log.debug("Attempting to load config file: |
|
269 | self.log.debug("Attempting to load config file: %s" % self.config_file_name) | |
220 | loader = PyFileConfigLoader(self.config_file_name, |
|
270 | loader = PyFileConfigLoader(self.config_file_name, | |
221 | path=self.config_file_paths) |
|
271 | path=self.config_file_paths) | |
222 | try: |
|
272 | try: | |
@@ -225,19 +275,18 b' class Application(object):' | |||||
225 | except IOError: |
|
275 | except IOError: | |
226 | # Only warn if the default config file was NOT being used. |
|
276 | # Only warn if the default config file was NOT being used. | |
227 | if not self.config_file_name==self.default_config_file_name: |
|
277 | if not self.config_file_name==self.default_config_file_name: | |
228 |
self.log.warn("Config file not found, skipping: |
|
278 | self.log.warn("Config file not found, skipping: %s" % \ | |
229 | self.config_file_name, exc_info=True) |
|
279 | self.config_file_name, exc_info=True) | |
230 | self.file_config = Config() |
|
280 | self.file_config = Config() | |
231 | except: |
|
281 | except: | |
232 |
self.log.warn("Error loading config file: |
|
282 | self.log.warn("Error loading config file: %s" % \ | |
233 |
|
|
283 | self.config_file_name, exc_info=True) | |
234 | self.file_config = Config() |
|
284 | self.file_config = Config() | |
235 | else: |
|
285 | ||
236 | self.log.debug("Config file loaded: <%s>" % loader.full_filename) |
|
286 | def set_file_config_log_level(self): | |
237 | self.log.debug(repr(self.file_config)) |
|
|||
238 | # We need to keeep self.log_level updated. But we only use the value |
|
287 | # We need to keeep self.log_level updated. But we only use the value | |
239 | # of the file_config if a value was not specified at the command |
|
288 | # of the file_config if a value was not specified at the command | |
240 | # line. |
|
289 | # line, because the command line overrides everything. | |
241 | if not hasattr(self.command_line_config.Global, 'log_level'): |
|
290 | if not hasattr(self.command_line_config.Global, 'log_level'): | |
242 | try: |
|
291 | try: | |
243 | self.log_level = self.file_config.Global.log_level |
|
292 | self.log_level = self.file_config.Global.log_level | |
@@ -248,6 +297,11 b' class Application(object):' | |||||
248 | """Do actions after the config file is loaded.""" |
|
297 | """Do actions after the config file is loaded.""" | |
249 | pass |
|
298 | pass | |
250 |
|
299 | |||
|
300 | def log_file_config(self): | |||
|
301 | if hasattr(self.file_config.Global, 'config_file'): | |||
|
302 | self.log.debug("Config file loaded: %s" % self.file_config.Global.config_file) | |||
|
303 | self.log.debug(repr(self.file_config)) | |||
|
304 | ||||
251 | def merge_configs(self): |
|
305 | def merge_configs(self): | |
252 | """Merge the default, command line and file config objects.""" |
|
306 | """Merge the default, command line and file config objects.""" | |
253 | config = Config() |
|
307 | config = Config() | |
@@ -255,6 +309,8 b' class Application(object):' | |||||
255 | config._merge(self.file_config) |
|
309 | config._merge(self.file_config) | |
256 | config._merge(self.command_line_config) |
|
310 | config._merge(self.command_line_config) | |
257 | self.master_config = config |
|
311 | self.master_config = config | |
|
312 | ||||
|
313 | def log_master_config(self): | |||
258 | self.log.debug("Master config created:") |
|
314 | self.log.debug("Master config created:") | |
259 | self.log.debug(repr(self.master_config)) |
|
315 | self.log.debug(repr(self.master_config)) | |
260 |
|
316 | |||
@@ -280,21 +336,29 b' class Application(object):' | |||||
280 |
|
336 | |||
281 | def abort(self): |
|
337 | def abort(self): | |
282 | """Abort the starting of the application.""" |
|
338 | """Abort the starting of the application.""" | |
283 | self.log.critical("Aborting application: %s" % self.name, exc_info=True) |
|
339 | if self._exiting: | |
284 | sys.exit(1) |
|
340 | pass | |
|
341 | else: | |||
|
342 | self.log.critical("Aborting application: %s" % self.name, exc_info=True) | |||
|
343 | self._exiting = True | |||
|
344 | sys.exit(1) | |||
285 |
|
345 | |||
286 | def exit(self): |
|
346 | def exit(self, exit_status=0): | |
287 | self.log.critical("Aborting application: %s" % self.name) |
|
347 | if self._exiting: | |
288 | sys.exit(1) |
|
348 | pass | |
|
349 | else: | |||
|
350 | self.log.debug("Exiting application: %s" % self.name) | |||
|
351 | self._exiting = True | |||
|
352 | sys.exit(exit_status) | |||
289 |
|
353 | |||
290 | def attempt(self, func, action='abort'): |
|
354 | def attempt(self, func, action='abort'): | |
291 | try: |
|
355 | try: | |
292 | func() |
|
356 | func() | |
293 | except SystemExit: |
|
357 | except SystemExit: | |
294 |
se |
|
358 | raise | |
295 | except: |
|
359 | except: | |
296 | if action == 'abort': |
|
360 | if action == 'abort': | |
297 | self.abort() |
|
361 | self.abort() | |
298 | elif action == 'exit': |
|
362 | elif action == 'exit': | |
299 | self.exit() |
|
363 | self.exit(0) | |
300 | No newline at end of file |
|
364 |
@@ -86,6 +86,7 b' class BuiltinTrap(Component):' | |||||
86 | """Store ipython references in the __builtin__ namespace.""" |
|
86 | """Store ipython references in the __builtin__ namespace.""" | |
87 | self.add_builtin('exit', Quitter(self.shell, 'exit')) |
|
87 | self.add_builtin('exit', Quitter(self.shell, 'exit')) | |
88 | self.add_builtin('quit', Quitter(self.shell, 'quit')) |
|
88 | self.add_builtin('quit', Quitter(self.shell, 'quit')) | |
|
89 | self.add_builtin('get_ipython', self.shell.get_ipython) | |||
89 |
|
90 | |||
90 | # Recursive reload function |
|
91 | # Recursive reload function | |
91 | try: |
|
92 | try: |
@@ -237,14 +237,20 b' class Component(HasTraitlets):' | |||||
237 | self.config = config |
|
237 | self.config = config | |
238 | # We used to deepcopy, but for now we are trying to just save |
|
238 | # We used to deepcopy, but for now we are trying to just save | |
239 | # by reference. This *could* have side effects as all components |
|
239 | # by reference. This *could* have side effects as all components | |
240 | # will share config. |
|
240 | # will share config. In fact, I did find such a side effect in | |
|
241 | # _config_changed below. If a config attribute value was a mutable type | |||
|
242 | # all instances of a component were getting the same copy, effectively | |||
|
243 | # making that a class attribute. | |||
241 | # self.config = deepcopy(config) |
|
244 | # self.config = deepcopy(config) | |
242 | else: |
|
245 | else: | |
243 | if self.parent is not None: |
|
246 | if self.parent is not None: | |
244 | self.config = self.parent.config |
|
247 | self.config = self.parent.config | |
245 | # We used to deepcopy, but for now we are trying to just save |
|
248 | # We used to deepcopy, but for now we are trying to just save | |
246 | # by reference. This *could* have side effects as all components |
|
249 | # by reference. This *could* have side effects as all components | |
247 | # will share config. |
|
250 | # will share config. In fact, I did find such a side effect in | |
|
251 | # _config_changed below. If a config attribute value was a mutable type | |||
|
252 | # all instances of a component were getting the same copy, effectively | |||
|
253 | # making that a class attribute. | |||
248 | # self.config = deepcopy(self.parent.config) |
|
254 | # self.config = deepcopy(self.parent.config) | |
249 |
|
255 | |||
250 | self.created = datetime.datetime.now() |
|
256 | self.created = datetime.datetime.now() | |
@@ -296,14 +302,29 b' class Component(HasTraitlets):' | |||||
296 | if new._has_section(sname): |
|
302 | if new._has_section(sname): | |
297 | my_config = new[sname] |
|
303 | my_config = new[sname] | |
298 | for k, v in traitlets.items(): |
|
304 | for k, v in traitlets.items(): | |
|
305 | # Don't allow traitlets with config=True to start with | |||
|
306 | # uppercase. Otherwise, they are confused with Config | |||
|
307 | # subsections. But, developers shouldn't have uppercase | |||
|
308 | # attributes anyways! (PEP 6) | |||
|
309 | if k[0].upper()==k[0] and not k.startswith('_'): | |||
|
310 | raise ComponentError('Component traitlets with ' | |||
|
311 | 'config=True must start with a lowercase so they are ' | |||
|
312 | 'not confused with Config subsections: %s.%s' % \ | |||
|
313 | (self.__class__.__name__, k)) | |||
299 | try: |
|
314 | try: | |
|
315 | # Here we grab the value from the config | |||
|
316 | # If k has the naming convention of a config | |||
|
317 | # section, it will be auto created. | |||
300 | config_value = my_config[k] |
|
318 | config_value = my_config[k] | |
301 | except KeyError: |
|
319 | except KeyError: | |
302 | pass |
|
320 | pass | |
303 | else: |
|
321 | else: | |
304 | # print "Setting %s.%s from %s.%s=%r" % \ |
|
322 | # print "Setting %s.%s from %s.%s=%r" % \ | |
305 | # (self.__class__.__name__,k,sname,k,config_value) |
|
323 | # (self.__class__.__name__,k,sname,k,config_value) | |
306 | setattr(self, k, config_value) |
|
324 | # We have to do a deepcopy here if we don't deepcopy the entire | |
|
325 | # config object. If we don't, a mutable config_value will be | |||
|
326 | # shared by all instances, effectively making it a class attribute. | |||
|
327 | setattr(self, k, deepcopy(config_value)) | |||
307 |
|
328 | |||
308 | @property |
|
329 | @property | |
309 | def children(self): |
|
330 | def children(self): |
@@ -124,7 +124,7 b' $self.bug_tracker' | |||||
124 | #color_scheme = 'Linux' # dbg |
|
124 | #color_scheme = 'Linux' # dbg | |
125 |
|
125 | |||
126 | try: |
|
126 | try: | |
127 |
rptdir = self.IP. |
|
127 | rptdir = self.IP.ipython_dir | |
128 | except: |
|
128 | except: | |
129 | rptdir = os.getcwd() |
|
129 | rptdir = os.getcwd() | |
130 | if not os.path.isdir(rptdir): |
|
130 | if not os.path.isdir(rptdir): |
@@ -70,6 +70,7 b' def BdbQuit_excepthook(et,ev,tb):' | |||||
70 | def BdbQuit_IPython_excepthook(self,et,ev,tb): |
|
70 | def BdbQuit_IPython_excepthook(self,et,ev,tb): | |
71 | print 'Exiting Debugger.' |
|
71 | print 'Exiting Debugger.' | |
72 |
|
72 | |||
|
73 | ||||
73 | class Tracer(object): |
|
74 | class Tracer(object): | |
74 | """Class for local debugging, similar to pdb.set_trace. |
|
75 | """Class for local debugging, similar to pdb.set_trace. | |
75 |
|
76 | |||
@@ -105,12 +106,10 b' class Tracer(object):' | |||||
105 | from the Python standard library for usage details. |
|
106 | from the Python standard library for usage details. | |
106 | """ |
|
107 | """ | |
107 |
|
108 | |||
108 | global __IPYTHON__ |
|
|||
109 | try: |
|
109 | try: | |
110 | __IPYTHON__ |
|
110 | ip = ipapi.get() | |
111 |
except |
|
111 | except: | |
112 | # Outside of ipython, we set our own exception hook manually |
|
112 | # Outside of ipython, we set our own exception hook manually | |
113 | __IPYTHON__ = ipapi.get() |
|
|||
114 | BdbQuit_excepthook.excepthook_ori = sys.excepthook |
|
113 | BdbQuit_excepthook.excepthook_ori = sys.excepthook | |
115 | sys.excepthook = BdbQuit_excepthook |
|
114 | sys.excepthook = BdbQuit_excepthook | |
116 | def_colors = 'NoColor' |
|
115 | def_colors = 'NoColor' | |
@@ -122,9 +121,8 b' class Tracer(object):' | |||||
122 | pass |
|
121 | pass | |
123 | else: |
|
122 | else: | |
124 | # In ipython, we use its custom exception handler mechanism |
|
123 | # In ipython, we use its custom exception handler mechanism | |
125 | ip = ipapi.get() |
|
|||
126 | def_colors = ip.colors |
|
124 | def_colors = ip.colors | |
127 | ip.set_custom_exc((bdb.BdbQuit,),BdbQuit_IPython_excepthook) |
|
125 | ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) | |
128 |
|
126 | |||
129 | if colors is None: |
|
127 | if colors is None: | |
130 | colors = def_colors |
|
128 | colors = def_colors | |
@@ -138,6 +136,7 b' class Tracer(object):' | |||||
138 |
|
136 | |||
139 | self.debugger.set_trace(sys._getframe().f_back) |
|
137 | self.debugger.set_trace(sys._getframe().f_back) | |
140 |
|
138 | |||
|
139 | ||||
141 | def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): |
|
140 | def decorate_fn_with_doc(new_fn, old_fn, additional_text=""): | |
142 | """Make new_fn have old_fn's doc string. This is particularly useful |
|
141 | """Make new_fn have old_fn's doc string. This is particularly useful | |
143 | for the do_... commands that hook into the help system. |
|
142 | for the do_... commands that hook into the help system. | |
@@ -149,6 +148,7 b' def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):' | |||||
149 | wrapper.__doc__ = old_fn.__doc__ + additional_text |
|
148 | wrapper.__doc__ = old_fn.__doc__ + additional_text | |
150 | return wrapper |
|
149 | return wrapper | |
151 |
|
150 | |||
|
151 | ||||
152 | def _file_lines(fname): |
|
152 | def _file_lines(fname): | |
153 | """Return the contents of a named file as a list of lines. |
|
153 | """Return the contents of a named file as a list of lines. | |
154 |
|
154 | |||
@@ -164,143 +164,98 b' def _file_lines(fname):' | |||||
164 | outfile.close() |
|
164 | outfile.close() | |
165 | return out |
|
165 | return out | |
166 |
|
166 | |||
|
167 | ||||
167 | class Pdb(OldPdb): |
|
168 | class Pdb(OldPdb): | |
168 | """Modified Pdb class, does not load readline.""" |
|
169 | """Modified Pdb class, does not load readline.""" | |
169 |
|
170 | |||
170 | if sys.version[:3] >= '2.5' or has_pydb: |
|
171 | def __init__(self,color_scheme='NoColor',completekey=None, | |
171 | def __init__(self,color_scheme='NoColor',completekey=None, |
|
172 | stdin=None, stdout=None): | |
172 | stdin=None, stdout=None): |
|
|||
173 |
|
173 | |||
174 |
|
|
174 | # Parent constructor: | |
175 |
|
|
175 | if has_pydb and completekey is None: | |
176 |
|
|
176 | OldPdb.__init__(self,stdin=stdin,stdout=Term.cout) | |
177 |
|
|
177 | else: | |
178 |
|
|
178 | OldPdb.__init__(self,completekey,stdin,stdout) | |
179 |
|
||||
180 | self.prompt = prompt # The default prompt is '(Pdb)' |
|
|||
181 |
|
179 | |||
182 | # IPython changes... |
|
180 | self.prompt = prompt # The default prompt is '(Pdb)' | |
183 | self.is_pydb = has_pydb |
|
181 | ||
184 |
|
182 | # IPython changes... | ||
185 |
|
|
183 | self.is_pydb = has_pydb | |
186 |
|
||||
187 | # iplib.py's ipalias seems to want pdb's checkline |
|
|||
188 | # which located in pydb.fn |
|
|||
189 | import pydb.fns |
|
|||
190 | self.checkline = lambda filename, lineno: \ |
|
|||
191 | pydb.fns.checkline(self, filename, lineno) |
|
|||
192 |
|
||||
193 | self.curframe = None |
|
|||
194 | self.do_restart = self.new_do_restart |
|
|||
195 |
|
||||
196 | self.old_all_completions = __IPYTHON__.Completer.all_completions |
|
|||
197 | __IPYTHON__.Completer.all_completions=self.all_completions |
|
|||
198 |
|
||||
199 | self.do_list = decorate_fn_with_doc(self.list_command_pydb, |
|
|||
200 | OldPdb.do_list) |
|
|||
201 | self.do_l = self.do_list |
|
|||
202 | self.do_frame = decorate_fn_with_doc(self.new_do_frame, |
|
|||
203 | OldPdb.do_frame) |
|
|||
204 |
|
||||
205 | self.aliases = {} |
|
|||
206 |
|
||||
207 | # Create color table: we copy the default one from the traceback |
|
|||
208 | # module and add a few attributes needed for debugging |
|
|||
209 | self.color_scheme_table = exception_colors() |
|
|||
210 |
|
184 | |||
211 | # shorthands |
|
185 | self.shell = ipapi.get() | |
212 | C = coloransi.TermColors |
|
|||
213 | cst = self.color_scheme_table |
|
|||
214 |
|
186 | |||
215 | cst['NoColor'].colors.breakpoint_enabled = C.NoColor |
|
187 | if self.is_pydb: | |
216 | cst['NoColor'].colors.breakpoint_disabled = C.NoColor |
|
|||
217 |
|
188 | |||
218 | cst['Linux'].colors.breakpoint_enabled = C.LightRed |
|
189 | # iplib.py's ipalias seems to want pdb's checkline | |
219 | cst['Linux'].colors.breakpoint_disabled = C.Red |
|
190 | # which located in pydb.fn | |
|
191 | import pydb.fns | |||
|
192 | self.checkline = lambda filename, lineno: \ | |||
|
193 | pydb.fns.checkline(self, filename, lineno) | |||
220 |
|
194 | |||
221 | cst['LightBG'].colors.breakpoint_enabled = C.LightRed |
|
195 | self.curframe = None | |
222 | cst['LightBG'].colors.breakpoint_disabled = C.Red |
|
196 | self.do_restart = self.new_do_restart | |
223 |
|
197 | |||
224 | self.set_colors(color_scheme) |
|
198 | self.old_all_completions = self.shell.Completer.all_completions | |
|
199 | self.shell.Completer.all_completions=self.all_completions | |||
225 |
|
200 | |||
226 | # Add a python parser so we can syntax highlight source while |
|
201 | self.do_list = decorate_fn_with_doc(self.list_command_pydb, | |
227 | # debugging. |
|
202 | OldPdb.do_list) | |
228 | self.parser = PyColorize.Parser() |
|
203 | self.do_l = self.do_list | |
|
204 | self.do_frame = decorate_fn_with_doc(self.new_do_frame, | |||
|
205 | OldPdb.do_frame) | |||
229 |
|
206 | |||
|
207 | self.aliases = {} | |||
230 |
|
208 | |||
231 | else: |
|
209 | # Create color table: we copy the default one from the traceback | |
232 | # Ugly hack: for Python 2.3-2.4, we can't call the parent constructor, |
|
210 | # module and add a few attributes needed for debugging | |
233 | # because it binds readline and breaks tab-completion. This means we |
|
211 | self.color_scheme_table = exception_colors() | |
234 | # have to COPY the constructor here. |
|
|||
235 | def __init__(self,color_scheme='NoColor'): |
|
|||
236 | bdb.Bdb.__init__(self) |
|
|||
237 | cmd.Cmd.__init__(self,completekey=None) # don't load readline |
|
|||
238 | self.prompt = 'ipdb> ' # The default prompt is '(Pdb)' |
|
|||
239 | self.aliases = {} |
|
|||
240 |
|
||||
241 | # These two lines are part of the py2.4 constructor, let's put them |
|
|||
242 | # unconditionally here as they won't cause any problems in 2.3. |
|
|||
243 | self.mainpyfile = '' |
|
|||
244 | self._wait_for_mainpyfile = 0 |
|
|||
245 |
|
||||
246 | # Read $HOME/.pdbrc and ./.pdbrc |
|
|||
247 | try: |
|
|||
248 | self.rcLines = _file_lines(os.path.join(os.environ['HOME'], |
|
|||
249 | ".pdbrc")) |
|
|||
250 | except KeyError: |
|
|||
251 | self.rcLines = [] |
|
|||
252 | self.rcLines.extend(_file_lines(".pdbrc")) |
|
|||
253 |
|
212 | |||
254 | # Create color table: we copy the default one from the traceback |
|
213 | # shorthands | |
255 | # module and add a few attributes needed for debugging |
|
214 | C = coloransi.TermColors | |
256 |
|
|
215 | cst = self.color_scheme_table | |
257 |
|
216 | |||
258 | # shorthands |
|
217 | cst['NoColor'].colors.breakpoint_enabled = C.NoColor | |
259 | C = coloransi.TermColors |
|
218 | cst['NoColor'].colors.breakpoint_disabled = C.NoColor | |
260 | cst = self.color_scheme_table |
|
|||
261 |
|
219 | |||
262 |
|
|
220 | cst['Linux'].colors.breakpoint_enabled = C.LightRed | |
263 |
|
|
221 | cst['Linux'].colors.breakpoint_disabled = C.Red | |
264 |
|
222 | |||
265 |
|
|
223 | cst['LightBG'].colors.breakpoint_enabled = C.LightRed | |
266 |
|
|
224 | cst['LightBG'].colors.breakpoint_disabled = C.Red | |
267 |
|
225 | |||
268 | cst['LightBG'].colors.breakpoint_enabled = C.LightRed |
|
226 | self.set_colors(color_scheme) | |
269 | cst['LightBG'].colors.breakpoint_disabled = C.Red |
|
|||
270 |
|
227 | |||
271 | self.set_colors(color_scheme) |
|
228 | # Add a python parser so we can syntax highlight source while | |
|
229 | # debugging. | |||
|
230 | self.parser = PyColorize.Parser() | |||
272 |
|
231 | |||
273 | # Add a python parser so we can syntax highlight source while |
|
|||
274 | # debugging. |
|
|||
275 | self.parser = PyColorize.Parser() |
|
|||
276 |
|
||||
277 | def set_colors(self, scheme): |
|
232 | def set_colors(self, scheme): | |
278 | """Shorthand access to the color table scheme selector method.""" |
|
233 | """Shorthand access to the color table scheme selector method.""" | |
279 | self.color_scheme_table.set_active_scheme(scheme) |
|
234 | self.color_scheme_table.set_active_scheme(scheme) | |
280 |
|
235 | |||
281 | def interaction(self, frame, traceback): |
|
236 | def interaction(self, frame, traceback): | |
282 |
|
|
237 | self.shell.set_completer_frame(frame) | |
283 | OldPdb.interaction(self, frame, traceback) |
|
238 | OldPdb.interaction(self, frame, traceback) | |
284 |
|
239 | |||
285 | def new_do_up(self, arg): |
|
240 | def new_do_up(self, arg): | |
286 | OldPdb.do_up(self, arg) |
|
241 | OldPdb.do_up(self, arg) | |
287 |
|
|
242 | self.shell.set_completer_frame(self.curframe) | |
288 | do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up) |
|
243 | do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up) | |
289 |
|
244 | |||
290 | def new_do_down(self, arg): |
|
245 | def new_do_down(self, arg): | |
291 | OldPdb.do_down(self, arg) |
|
246 | OldPdb.do_down(self, arg) | |
292 |
|
|
247 | self.shell.set_completer_frame(self.curframe) | |
293 |
|
248 | |||
294 | do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down) |
|
249 | do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down) | |
295 |
|
250 | |||
296 | def new_do_frame(self, arg): |
|
251 | def new_do_frame(self, arg): | |
297 | OldPdb.do_frame(self, arg) |
|
252 | OldPdb.do_frame(self, arg) | |
298 |
|
|
253 | self.shell.set_completer_frame(self.curframe) | |
299 |
|
254 | |||
300 | def new_do_quit(self, arg): |
|
255 | def new_do_quit(self, arg): | |
301 |
|
256 | |||
302 | if hasattr(self, 'old_all_completions'): |
|
257 | if hasattr(self, 'old_all_completions'): | |
303 |
|
|
258 | self.shell.Completer.all_completions=self.old_all_completions | |
304 |
|
259 | |||
305 |
|
260 | |||
306 | return OldPdb.do_quit(self, arg) |
|
261 | return OldPdb.do_quit(self, arg) | |
@@ -314,7 +269,7 b' class Pdb(OldPdb):' | |||||
314 | return self.do_quit(arg) |
|
269 | return self.do_quit(arg) | |
315 |
|
270 | |||
316 | def postloop(self): |
|
271 | def postloop(self): | |
317 |
|
|
272 | self.shell.set_completer_frame(None) | |
318 |
|
273 | |||
319 | def print_stack_trace(self): |
|
274 | def print_stack_trace(self): | |
320 | try: |
|
275 | try: | |
@@ -331,7 +286,7 b' class Pdb(OldPdb):' | |||||
331 | # vds: >> |
|
286 | # vds: >> | |
332 | frame, lineno = frame_lineno |
|
287 | frame, lineno = frame_lineno | |
333 | filename = frame.f_code.co_filename |
|
288 | filename = frame.f_code.co_filename | |
334 |
|
|
289 | self.shell.hooks.synchronize_with_editor(filename, lineno, 0) | |
335 | # vds: << |
|
290 | # vds: << | |
336 |
|
291 | |||
337 | def format_stack_entry(self, frame_lineno, lprefix=': ', context = 3): |
|
292 | def format_stack_entry(self, frame_lineno, lprefix=': ', context = 3): | |
@@ -500,7 +455,7 b' class Pdb(OldPdb):' | |||||
500 | # vds: >> |
|
455 | # vds: >> | |
501 | lineno = first |
|
456 | lineno = first | |
502 | filename = self.curframe.f_code.co_filename |
|
457 | filename = self.curframe.f_code.co_filename | |
503 |
|
|
458 | self.shell.hooks.synchronize_with_editor(filename, lineno, 0) | |
504 | # vds: << |
|
459 | # vds: << | |
505 |
|
460 | |||
506 | do_l = do_list |
|
461 | do_l = do_list | |
@@ -509,16 +464,16 b' class Pdb(OldPdb):' | |||||
509 | """The debugger interface to magic_pdef""" |
|
464 | """The debugger interface to magic_pdef""" | |
510 | namespaces = [('Locals', self.curframe.f_locals), |
|
465 | namespaces = [('Locals', self.curframe.f_locals), | |
511 | ('Globals', self.curframe.f_globals)] |
|
466 | ('Globals', self.curframe.f_globals)] | |
512 |
|
|
467 | self.shell.magic_pdef(arg, namespaces=namespaces) | |
513 |
|
468 | |||
514 | def do_pdoc(self, arg): |
|
469 | def do_pdoc(self, arg): | |
515 | """The debugger interface to magic_pdoc""" |
|
470 | """The debugger interface to magic_pdoc""" | |
516 | namespaces = [('Locals', self.curframe.f_locals), |
|
471 | namespaces = [('Locals', self.curframe.f_locals), | |
517 | ('Globals', self.curframe.f_globals)] |
|
472 | ('Globals', self.curframe.f_globals)] | |
518 |
|
|
473 | self.shell.magic_pdoc(arg, namespaces=namespaces) | |
519 |
|
474 | |||
520 | def do_pinfo(self, arg): |
|
475 | def do_pinfo(self, arg): | |
521 | """The debugger equivalant of ?obj""" |
|
476 | """The debugger equivalant of ?obj""" | |
522 | namespaces = [('Locals', self.curframe.f_locals), |
|
477 | namespaces = [('Locals', self.curframe.f_locals), | |
523 | ('Globals', self.curframe.f_globals)] |
|
478 | ('Globals', self.curframe.f_globals)] | |
524 |
|
|
479 | self.shell.magic_pinfo("pinfo %s" % arg, namespaces=namespaces) |
@@ -46,11 +46,11 b' class DisplayTrap(Component):' | |||||
46 | # Only turn off the trap when the outermost call to __exit__ is made. |
|
46 | # Only turn off the trap when the outermost call to __exit__ is made. | |
47 | self._nested_level = 0 |
|
47 | self._nested_level = 0 | |
48 |
|
48 | |||
49 | @auto_attr |
|
49 | # @auto_attr | |
50 | def shell(self): |
|
50 | # def shell(self): | |
51 | return Component.get_instances( |
|
51 | # return Component.get_instances( | |
52 | root=self.root, |
|
52 | # root=self.root, | |
53 | klass='IPython.core.iplib.InteractiveShell')[0] |
|
53 | # klass='IPython.core.iplib.InteractiveShell')[0] | |
54 |
|
54 | |||
55 | def __enter__(self): |
|
55 | def __enter__(self): | |
56 | if self._nested_level == 0: |
|
56 | if self._nested_level == 0: |
@@ -68,7 +68,7 b' class InteractiveShellEmbed(InteractiveShell):' | |||||
68 | # is True by default. |
|
68 | # is True by default. | |
69 | display_banner = CBool(True) |
|
69 | display_banner = CBool(True) | |
70 |
|
70 | |||
71 | def __init__(self, parent=None, config=None, ipythondir=None, usage=None, |
|
71 | def __init__(self, parent=None, config=None, ipython_dir=None, usage=None, | |
72 | user_ns=None, user_global_ns=None, |
|
72 | user_ns=None, user_global_ns=None, | |
73 | banner1=None, banner2=None, display_banner=None, |
|
73 | banner1=None, banner2=None, display_banner=None, | |
74 | custom_exceptions=((),None), exit_msg=''): |
|
74 | custom_exceptions=((),None), exit_msg=''): | |
@@ -76,7 +76,7 b' class InteractiveShellEmbed(InteractiveShell):' | |||||
76 | self.save_sys_ipcompleter() |
|
76 | self.save_sys_ipcompleter() | |
77 |
|
77 | |||
78 | super(InteractiveShellEmbed,self).__init__( |
|
78 | super(InteractiveShellEmbed,self).__init__( | |
79 | parent=parent, config=config, ipythondir=ipythondir, usage=usage, |
|
79 | parent=parent, config=config, ipython_dir=ipython_dir, usage=usage, | |
80 | user_ns=user_ns, user_global_ns=user_global_ns, |
|
80 | user_ns=user_ns, user_global_ns=user_global_ns, | |
81 | banner1=banner1, banner2=banner2, display_banner=display_banner, |
|
81 | banner1=banner1, banner2=banner2, display_banner=display_banner, | |
82 | custom_exceptions=custom_exceptions) |
|
82 | custom_exceptions=custom_exceptions) | |
@@ -233,14 +233,6 b' class InteractiveShellEmbed(InteractiveShell):' | |||||
233 | for var in local_varnames: |
|
233 | for var in local_varnames: | |
234 | delvar(var,None) |
|
234 | delvar(var,None) | |
235 |
|
235 | |||
236 | def set_completer_frame(self, frame=None): |
|
|||
237 | if frame: |
|
|||
238 | self.Completer.namespace = frame.f_locals |
|
|||
239 | self.Completer.global_namespace = frame.f_globals |
|
|||
240 | else: |
|
|||
241 | self.Completer.namespace = self.user_ns |
|
|||
242 | self.Completer.global_namespace = self.user_global_ns |
|
|||
243 |
|
||||
244 |
|
236 | |||
245 | _embedded_shell = None |
|
237 | _embedded_shell = None | |
246 |
|
238 |
@@ -18,16 +18,19 b' has been made into a component, this module will be sent to deathrow.' | |||||
18 | # Imports |
|
18 | # Imports | |
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 |
|
20 | |||
21 | from IPython.core.error import TryNext, UsageError |
|
21 | from IPython.core.error import TryNext, UsageError, IPythonCoreError | |
22 |
|
22 | |||
23 | #----------------------------------------------------------------------------- |
|
23 | #----------------------------------------------------------------------------- | |
24 | # Classes and functions |
|
24 | # Classes and functions | |
25 | #----------------------------------------------------------------------------- |
|
25 | #----------------------------------------------------------------------------- | |
26 |
|
26 | |||
|
27 | ||||
27 | def get(): |
|
28 | def get(): | |
28 | """Get the most recently created InteractiveShell instance.""" |
|
29 | """Get the most recently created InteractiveShell instance.""" | |
29 | from IPython.core.iplib import InteractiveShell |
|
30 | from IPython.core.iplib import InteractiveShell | |
30 | insts = InteractiveShell.get_instances() |
|
31 | insts = InteractiveShell.get_instances() | |
|
32 | if len(insts)==0: | |||
|
33 | return None | |||
31 | most_recent = insts[0] |
|
34 | most_recent = insts[0] | |
32 | for inst in insts[1:]: |
|
35 | for inst in insts[1:]: | |
33 | if inst.created > most_recent.created: |
|
36 | if inst.created > most_recent.created: |
@@ -1,7 +1,8 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | """ |
|
3 | """ | |
4 | The main IPython application object |
|
4 | The :class:`~IPython.core.application.Application` object for the command | |
|
5 | line :command:`ipython` program. | |||
5 |
|
6 | |||
6 | Authors: |
|
7 | Authors: | |
7 |
|
8 | |||
@@ -28,19 +29,17 b' import os' | |||||
28 | import sys |
|
29 | import sys | |
29 | import warnings |
|
30 | import warnings | |
30 |
|
31 | |||
31 |
from IPython.core.application import Application, |
|
32 | from IPython.core.application import Application, BaseAppArgParseConfigLoader | |
32 | from IPython.core import release |
|
33 | from IPython.core import release | |
33 | from IPython.core.iplib import InteractiveShell |
|
34 | from IPython.core.iplib import InteractiveShell | |
34 | from IPython.config.loader import ( |
|
35 | from IPython.config.loader import ( | |
35 | NoConfigDefault, |
|
36 | NoConfigDefault, | |
36 | Config, |
|
37 | Config, | |
37 | ConfigError, |
|
|||
38 | PyFileConfigLoader |
|
38 | PyFileConfigLoader | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | from IPython.lib import inputhook |
|
41 | from IPython.lib import inputhook | |
42 |
|
42 | |||
43 | from IPython.utils.ipstruct import Struct |
|
|||
44 | from IPython.utils.genutils import filefind, get_ipython_dir |
|
43 | from IPython.utils.genutils import filefind, get_ipython_dir | |
45 |
|
44 | |||
46 | #----------------------------------------------------------------------------- |
|
45 | #----------------------------------------------------------------------------- | |
@@ -80,181 +79,181 b' See the %gui magic for information on the new interface.' | |||||
80 | #----------------------------------------------------------------------------- |
|
79 | #----------------------------------------------------------------------------- | |
81 |
|
80 | |||
82 | cl_args = ( |
|
81 | cl_args = ( | |
83 | (('-autocall',), dict( |
|
82 | (('--autocall',), dict( | |
84 | type=int, dest='InteractiveShell.autocall', default=NoConfigDefault, |
|
83 | type=int, dest='InteractiveShell.autocall', default=NoConfigDefault, | |
85 | help='Set the autocall value (0,1,2).', |
|
84 | help='Set the autocall value (0,1,2).', | |
86 | metavar='InteractiveShell.autocall') |
|
85 | metavar='InteractiveShell.autocall') | |
87 | ), |
|
86 | ), | |
88 | (('-autoindent',), dict( |
|
87 | (('--autoindent',), dict( | |
89 | action='store_true', dest='InteractiveShell.autoindent', default=NoConfigDefault, |
|
88 | action='store_true', dest='InteractiveShell.autoindent', default=NoConfigDefault, | |
90 | help='Turn on autoindenting.') |
|
89 | help='Turn on autoindenting.') | |
91 | ), |
|
90 | ), | |
92 | (('-noautoindent',), dict( |
|
91 | (('--no-autoindent',), dict( | |
93 | action='store_false', dest='InteractiveShell.autoindent', default=NoConfigDefault, |
|
92 | action='store_false', dest='InteractiveShell.autoindent', default=NoConfigDefault, | |
94 | help='Turn off autoindenting.') |
|
93 | help='Turn off autoindenting.') | |
95 | ), |
|
94 | ), | |
96 | (('-automagic',), dict( |
|
95 | (('--automagic',), dict( | |
97 | action='store_true', dest='InteractiveShell.automagic', default=NoConfigDefault, |
|
96 | action='store_true', dest='InteractiveShell.automagic', default=NoConfigDefault, | |
98 | help='Turn on the auto calling of magic commands.') |
|
97 | help='Turn on the auto calling of magic commands.') | |
99 | ), |
|
98 | ), | |
100 | (('-noautomagic',), dict( |
|
99 | (('--no-automagic',), dict( | |
101 | action='store_false', dest='InteractiveShell.automagic', default=NoConfigDefault, |
|
100 | action='store_false', dest='InteractiveShell.automagic', default=NoConfigDefault, | |
102 | help='Turn off the auto calling of magic commands.') |
|
101 | help='Turn off the auto calling of magic commands.') | |
103 | ), |
|
102 | ), | |
104 |
(('-autoedit |
|
103 | (('--autoedit-syntax',), dict( | |
105 | action='store_true', dest='InteractiveShell.autoedit_syntax', default=NoConfigDefault, |
|
104 | action='store_true', dest='InteractiveShell.autoedit_syntax', default=NoConfigDefault, | |
106 | help='Turn on auto editing of files with syntax errors.') |
|
105 | help='Turn on auto editing of files with syntax errors.') | |
107 | ), |
|
106 | ), | |
108 |
(('-noautoedit |
|
107 | (('--no-autoedit-syntax',), dict( | |
109 | action='store_false', dest='InteractiveShell.autoedit_syntax', default=NoConfigDefault, |
|
108 | action='store_false', dest='InteractiveShell.autoedit_syntax', default=NoConfigDefault, | |
110 | help='Turn off auto editing of files with syntax errors.') |
|
109 | help='Turn off auto editing of files with syntax errors.') | |
111 | ), |
|
110 | ), | |
112 | (('-banner',), dict( |
|
111 | (('--banner',), dict( | |
113 | action='store_true', dest='Global.display_banner', default=NoConfigDefault, |
|
112 | action='store_true', dest='Global.display_banner', default=NoConfigDefault, | |
114 | help='Display a banner upon starting IPython.') |
|
113 | help='Display a banner upon starting IPython.') | |
115 | ), |
|
114 | ), | |
116 | (('-nobanner',), dict( |
|
115 | (('--no-banner',), dict( | |
117 | action='store_false', dest='Global.display_banner', default=NoConfigDefault, |
|
116 | action='store_false', dest='Global.display_banner', default=NoConfigDefault, | |
118 | help="Don't display a banner upon starting IPython.") |
|
117 | help="Don't display a banner upon starting IPython.") | |
119 | ), |
|
118 | ), | |
120 |
(('-cache |
|
119 | (('--cache-size',), dict( | |
121 | type=int, dest='InteractiveShell.cache_size', default=NoConfigDefault, |
|
120 | type=int, dest='InteractiveShell.cache_size', default=NoConfigDefault, | |
122 | help="Set the size of the output cache.", |
|
121 | help="Set the size of the output cache.", | |
123 | metavar='InteractiveShell.cache_size') |
|
122 | metavar='InteractiveShell.cache_size') | |
124 | ), |
|
123 | ), | |
125 | (('-classic',), dict( |
|
124 | (('--classic',), dict( | |
126 | action='store_true', dest='Global.classic', default=NoConfigDefault, |
|
125 | action='store_true', dest='Global.classic', default=NoConfigDefault, | |
127 | help="Gives IPython a similar feel to the classic Python prompt.") |
|
126 | help="Gives IPython a similar feel to the classic Python prompt.") | |
128 | ), |
|
127 | ), | |
129 | (('-colors',), dict( |
|
128 | (('--colors',), dict( | |
130 | type=str, dest='InteractiveShell.colors', default=NoConfigDefault, |
|
129 | type=str, dest='InteractiveShell.colors', default=NoConfigDefault, | |
131 | help="Set the color scheme (NoColor, Linux, and LightBG).", |
|
130 | help="Set the color scheme (NoColor, Linux, and LightBG).", | |
132 | metavar='InteractiveShell.colors') |
|
131 | metavar='InteractiveShell.colors') | |
133 | ), |
|
132 | ), | |
134 |
(('-color |
|
133 | (('--color-info',), dict( | |
135 | action='store_true', dest='InteractiveShell.color_info', default=NoConfigDefault, |
|
134 | action='store_true', dest='InteractiveShell.color_info', default=NoConfigDefault, | |
136 | help="Enable using colors for info related things.") |
|
135 | help="Enable using colors for info related things.") | |
137 | ), |
|
136 | ), | |
138 |
(('- |
|
137 | (('--no-color-info',), dict( | |
139 | action='store_false', dest='InteractiveShell.color_info', default=NoConfigDefault, |
|
138 | action='store_false', dest='InteractiveShell.color_info', default=NoConfigDefault, | |
140 | help="Disable using colors for info related things.") |
|
139 | help="Disable using colors for info related things.") | |
141 | ), |
|
140 | ), | |
142 |
(('-confirm |
|
141 | (('--confirm-exit',), dict( | |
143 | action='store_true', dest='InteractiveShell.confirm_exit', default=NoConfigDefault, |
|
142 | action='store_true', dest='InteractiveShell.confirm_exit', default=NoConfigDefault, | |
144 | help="Prompt the user when existing.") |
|
143 | help="Prompt the user when existing.") | |
145 | ), |
|
144 | ), | |
146 |
(('-noconfirm |
|
145 | (('--no-confirm-exit',), dict( | |
147 | action='store_false', dest='InteractiveShell.confirm_exit', default=NoConfigDefault, |
|
146 | action='store_false', dest='InteractiveShell.confirm_exit', default=NoConfigDefault, | |
148 | help="Don't prompt the user when existing.") |
|
147 | help="Don't prompt the user when existing.") | |
149 | ), |
|
148 | ), | |
150 |
(('- |
|
149 | (('--deep-reload',), dict( | |
151 | action='store_true', dest='InteractiveShell.deep_reload', default=NoConfigDefault, |
|
150 | action='store_true', dest='InteractiveShell.deep_reload', default=NoConfigDefault, | |
152 | help="Enable deep (recursive) reloading by default.") |
|
151 | help="Enable deep (recursive) reloading by default.") | |
153 | ), |
|
152 | ), | |
154 |
(('- |
|
153 | (('--no-deep-reload',), dict( | |
155 | action='store_false', dest='InteractiveShell.deep_reload', default=NoConfigDefault, |
|
154 | action='store_false', dest='InteractiveShell.deep_reload', default=NoConfigDefault, | |
156 | help="Disable deep (recursive) reloading by default.") |
|
155 | help="Disable deep (recursive) reloading by default.") | |
157 | ), |
|
156 | ), | |
158 | (('-editor',), dict( |
|
157 | (('--editor',), dict( | |
159 | type=str, dest='InteractiveShell.editor', default=NoConfigDefault, |
|
158 | type=str, dest='InteractiveShell.editor', default=NoConfigDefault, | |
160 | help="Set the editor used by IPython (default to $EDITOR/vi/notepad).", |
|
159 | help="Set the editor used by IPython (default to $EDITOR/vi/notepad).", | |
161 | metavar='InteractiveShell.editor') |
|
160 | metavar='InteractiveShell.editor') | |
162 | ), |
|
161 | ), | |
163 | (('-log','-l'), dict( |
|
162 | (('--log','-l'), dict( | |
164 | action='store_true', dest='InteractiveShell.logstart', default=NoConfigDefault, |
|
163 | action='store_true', dest='InteractiveShell.logstart', default=NoConfigDefault, | |
165 | help="Start logging to the default file (./ipython_log.py).") |
|
164 | help="Start logging to the default file (./ipython_log.py).") | |
166 | ), |
|
165 | ), | |
167 | (('-logfile','-lf'), dict( |
|
166 | (('--logfile','-lf'), dict( | |
168 |
type= |
|
167 | type=unicode, dest='InteractiveShell.logfile', default=NoConfigDefault, | |
169 | help="Start logging to logfile.", |
|
168 | help="Start logging to logfile.", | |
170 | metavar='InteractiveShell.logfile') |
|
169 | metavar='InteractiveShell.logfile') | |
171 | ), |
|
170 | ), | |
172 | (('-logappend','-la'), dict( |
|
171 | (('--log-append','-la'), dict( | |
173 |
type= |
|
172 | type=unicode, dest='InteractiveShell.logappend', default=NoConfigDefault, | |
174 |
help="Start logging to |
|
173 | help="Start logging to the give file in append mode.", | |
175 | metavar='InteractiveShell.logfile') |
|
174 | metavar='InteractiveShell.logfile') | |
176 | ), |
|
175 | ), | |
177 | (('-pdb',), dict( |
|
176 | (('--pdb',), dict( | |
178 | action='store_true', dest='InteractiveShell.pdb', default=NoConfigDefault, |
|
177 | action='store_true', dest='InteractiveShell.pdb', default=NoConfigDefault, | |
179 | help="Enable auto calling the pdb debugger after every exception.") |
|
178 | help="Enable auto calling the pdb debugger after every exception.") | |
180 | ), |
|
179 | ), | |
181 | (('-nopdb',), dict( |
|
180 | (('--no-pdb',), dict( | |
182 | action='store_false', dest='InteractiveShell.pdb', default=NoConfigDefault, |
|
181 | action='store_false', dest='InteractiveShell.pdb', default=NoConfigDefault, | |
183 | help="Disable auto calling the pdb debugger after every exception.") |
|
182 | help="Disable auto calling the pdb debugger after every exception.") | |
184 | ), |
|
183 | ), | |
185 | (('-pprint',), dict( |
|
184 | (('--pprint',), dict( | |
186 | action='store_true', dest='InteractiveShell.pprint', default=NoConfigDefault, |
|
185 | action='store_true', dest='InteractiveShell.pprint', default=NoConfigDefault, | |
187 | help="Enable auto pretty printing of results.") |
|
186 | help="Enable auto pretty printing of results.") | |
188 | ), |
|
187 | ), | |
189 | (('-nopprint',), dict( |
|
188 | (('--no-pprint',), dict( | |
190 | action='store_false', dest='InteractiveShell.pprint', default=NoConfigDefault, |
|
189 | action='store_false', dest='InteractiveShell.pprint', default=NoConfigDefault, | |
191 | help="Disable auto auto pretty printing of results.") |
|
190 | help="Disable auto auto pretty printing of results.") | |
192 | ), |
|
191 | ), | |
193 |
(('-prompt |
|
192 | (('--prompt-in1','-pi1'), dict( | |
194 | type=str, dest='InteractiveShell.prompt_in1', default=NoConfigDefault, |
|
193 | type=str, dest='InteractiveShell.prompt_in1', default=NoConfigDefault, | |
195 | help="Set the main input prompt ('In [\#]: ')", |
|
194 | help="Set the main input prompt ('In [\#]: ')", | |
196 | metavar='InteractiveShell.prompt_in1') |
|
195 | metavar='InteractiveShell.prompt_in1') | |
197 | ), |
|
196 | ), | |
198 |
(('-prompt |
|
197 | (('--prompt-in2','-pi2'), dict( | |
199 | type=str, dest='InteractiveShell.prompt_in2', default=NoConfigDefault, |
|
198 | type=str, dest='InteractiveShell.prompt_in2', default=NoConfigDefault, | |
200 | help="Set the secondary input prompt (' .\D.: ')", |
|
199 | help="Set the secondary input prompt (' .\D.: ')", | |
201 | metavar='InteractiveShell.prompt_in2') |
|
200 | metavar='InteractiveShell.prompt_in2') | |
202 | ), |
|
201 | ), | |
203 |
(('-prompt |
|
202 | (('--prompt-out','-po'), dict( | |
204 | type=str, dest='InteractiveShell.prompt_out', default=NoConfigDefault, |
|
203 | type=str, dest='InteractiveShell.prompt_out', default=NoConfigDefault, | |
205 | help="Set the output prompt ('Out[\#]:')", |
|
204 | help="Set the output prompt ('Out[\#]:')", | |
206 | metavar='InteractiveShell.prompt_out') |
|
205 | metavar='InteractiveShell.prompt_out') | |
207 | ), |
|
206 | ), | |
208 | (('-quick',), dict( |
|
207 | (('--quick',), dict( | |
209 | action='store_true', dest='Global.quick', default=NoConfigDefault, |
|
208 | action='store_true', dest='Global.quick', default=NoConfigDefault, | |
210 | help="Enable quick startup with no config files.") |
|
209 | help="Enable quick startup with no config files.") | |
211 | ), |
|
210 | ), | |
212 | (('-readline',), dict( |
|
211 | (('--readline',), dict( | |
213 | action='store_true', dest='InteractiveShell.readline_use', default=NoConfigDefault, |
|
212 | action='store_true', dest='InteractiveShell.readline_use', default=NoConfigDefault, | |
214 | help="Enable readline for command line usage.") |
|
213 | help="Enable readline for command line usage.") | |
215 | ), |
|
214 | ), | |
216 | (('-noreadline',), dict( |
|
215 | (('--no-readline',), dict( | |
217 | action='store_false', dest='InteractiveShell.readline_use', default=NoConfigDefault, |
|
216 | action='store_false', dest='InteractiveShell.readline_use', default=NoConfigDefault, | |
218 | help="Disable readline for command line usage.") |
|
217 | help="Disable readline for command line usage.") | |
219 | ), |
|
218 | ), | |
220 |
(('-screen |
|
219 | (('--screen-length','-sl'), dict( | |
221 | type=int, dest='InteractiveShell.screen_length', default=NoConfigDefault, |
|
220 | type=int, dest='InteractiveShell.screen_length', default=NoConfigDefault, | |
222 | help='Number of lines on screen, used to control printing of long strings.', |
|
221 | help='Number of lines on screen, used to control printing of long strings.', | |
223 | metavar='InteractiveShell.screen_length') |
|
222 | metavar='InteractiveShell.screen_length') | |
224 | ), |
|
223 | ), | |
225 |
(('-separate |
|
224 | (('--separate-in','-si'), dict( | |
226 | type=str, dest='InteractiveShell.separate_in', default=NoConfigDefault, |
|
225 | type=str, dest='InteractiveShell.separate_in', default=NoConfigDefault, | |
227 | help="Separator before input prompts. Default '\n'.", |
|
226 | help="Separator before input prompts. Default '\n'.", | |
228 | metavar='InteractiveShell.separate_in') |
|
227 | metavar='InteractiveShell.separate_in') | |
229 | ), |
|
228 | ), | |
230 |
(('-separate |
|
229 | (('--separate-out','-so'), dict( | |
231 | type=str, dest='InteractiveShell.separate_out', default=NoConfigDefault, |
|
230 | type=str, dest='InteractiveShell.separate_out', default=NoConfigDefault, | |
232 | help="Separator before output prompts. Default 0 (nothing).", |
|
231 | help="Separator before output prompts. Default 0 (nothing).", | |
233 | metavar='InteractiveShell.separate_out') |
|
232 | metavar='InteractiveShell.separate_out') | |
234 | ), |
|
233 | ), | |
235 |
(('-separate |
|
234 | (('--separate-out2','-so2'), dict( | |
236 | type=str, dest='InteractiveShell.separate_out2', default=NoConfigDefault, |
|
235 | type=str, dest='InteractiveShell.separate_out2', default=NoConfigDefault, | |
237 | help="Separator after output prompts. Default 0 (nonight).", |
|
236 | help="Separator after output prompts. Default 0 (nonight).", | |
238 | metavar='InteractiveShell.separate_out2') |
|
237 | metavar='InteractiveShell.separate_out2') | |
239 | ), |
|
238 | ), | |
240 | (('-nosep',), dict( |
|
239 | (('-no-sep',), dict( | |
241 | action='store_true', dest='Global.nosep', default=NoConfigDefault, |
|
240 | action='store_true', dest='Global.nosep', default=NoConfigDefault, | |
242 | help="Eliminate all spacing between prompts.") |
|
241 | help="Eliminate all spacing between prompts.") | |
243 | ), |
|
242 | ), | |
244 |
(('- |
|
243 | (('--term-title',), dict( | |
245 | action='store_true', dest='InteractiveShell.term_title', default=NoConfigDefault, |
|
244 | action='store_true', dest='InteractiveShell.term_title', default=NoConfigDefault, | |
246 | help="Enable auto setting the terminal title.") |
|
245 | help="Enable auto setting the terminal title.") | |
247 | ), |
|
246 | ), | |
248 |
(('- |
|
247 | (('--no-term-title',), dict( | |
249 | action='store_false', dest='InteractiveShell.term_title', default=NoConfigDefault, |
|
248 | action='store_false', dest='InteractiveShell.term_title', default=NoConfigDefault, | |
250 | help="Disable auto setting the terminal title.") |
|
249 | help="Disable auto setting the terminal title.") | |
251 | ), |
|
250 | ), | |
252 | (('-xmode',), dict( |
|
251 | (('--xmode',), dict( | |
253 | type=str, dest='InteractiveShell.xmode', default=NoConfigDefault, |
|
252 | type=str, dest='InteractiveShell.xmode', default=NoConfigDefault, | |
254 | help="Exception mode ('Plain','Context','Verbose')", |
|
253 | help="Exception mode ('Plain','Context','Verbose')", | |
255 | metavar='InteractiveShell.xmode') |
|
254 | metavar='InteractiveShell.xmode') | |
256 | ), |
|
255 | ), | |
257 | (('-ext',), dict( |
|
256 | (('--ext',), dict( | |
258 | type=str, dest='Global.extra_extension', default=NoConfigDefault, |
|
257 | type=str, dest='Global.extra_extension', default=NoConfigDefault, | |
259 | help="The dotted module name of an IPython extension to load.", |
|
258 | help="The dotted module name of an IPython extension to load.", | |
260 | metavar='Global.extra_extension') |
|
259 | metavar='Global.extra_extension') | |
@@ -268,36 +267,39 b' cl_args = (' | |||||
268 | action='store_true', dest='Global.force_interact', default=NoConfigDefault, |
|
267 | action='store_true', dest='Global.force_interact', default=NoConfigDefault, | |
269 | help="If running code from the command line, become interactive afterwards.") |
|
268 | help="If running code from the command line, become interactive afterwards.") | |
270 | ), |
|
269 | ), | |
271 | (('-wthread',), dict( |
|
270 | (('--wthread',), dict( | |
272 | action='store_true', dest='Global.wthread', default=NoConfigDefault, |
|
271 | action='store_true', dest='Global.wthread', default=NoConfigDefault, | |
273 | help="Enable wxPython event loop integration.") |
|
272 | help="Enable wxPython event loop integration.") | |
274 | ), |
|
273 | ), | |
275 | (('-q4thread','-qthread'), dict( |
|
274 | (('--q4thread','--qthread'), dict( | |
276 | action='store_true', dest='Global.q4thread', default=NoConfigDefault, |
|
275 | action='store_true', dest='Global.q4thread', default=NoConfigDefault, | |
277 | help="Enable Qt4 event loop integration. Qt3 is no longer supported.") |
|
276 | help="Enable Qt4 event loop integration. Qt3 is no longer supported.") | |
278 | ), |
|
277 | ), | |
279 | (('-gthread',), dict( |
|
278 | (('--gthread',), dict( | |
280 | action='store_true', dest='Global.gthread', default=NoConfigDefault, |
|
279 | action='store_true', dest='Global.gthread', default=NoConfigDefault, | |
281 | help="Enable GTK event loop integration.") |
|
280 | help="Enable GTK event loop integration.") | |
282 | ), |
|
281 | ), | |
283 | # # These are only here to get the proper deprecation warnings |
|
282 | # # These are only here to get the proper deprecation warnings | |
284 | (('-pylab',), dict( |
|
283 | (('--pylab',), dict( | |
285 | action='store_true', dest='Global.pylab', default=NoConfigDefault, |
|
284 | action='store_true', dest='Global.pylab', default=NoConfigDefault, | |
286 |
help="Disabled. Pylab has been disabled until matplotlib |
|
285 | help="Disabled. Pylab has been disabled until matplotlib " | |
|
286 | "supports this version of IPython.") | |||
287 | ) |
|
287 | ) | |
288 | ) |
|
288 | ) | |
289 |
|
289 | |||
290 |
|
290 | |||
291 |
class IPythonAppCLConfigLoader( |
|
291 | class IPythonAppCLConfigLoader(BaseAppArgParseConfigLoader): | |
292 |
|
292 | |||
293 | arguments = cl_args |
|
293 | arguments = cl_args | |
294 |
|
294 | |||
295 |
|
295 | |||
296 |
|
|
296 | default_config_file_name = u'ipython_config.py' | |
|
297 | ||||
297 |
|
298 | |||
298 | class IPythonApp(Application): |
|
299 | class IPythonApp(Application): | |
299 | name = 'ipython' |
|
300 | name = u'ipython' | |
300 | config_file_name = _default_config_file_name |
|
301 | description = 'IPython: an enhanced interactive Python shell.' | |
|
302 | config_file_name = default_config_file_name | |||
301 |
|
303 | |||
302 | def create_default_config(self): |
|
304 | def create_default_config(self): | |
303 | super(IPythonApp, self).create_default_config() |
|
305 | super(IPythonApp, self).create_default_config() | |
@@ -313,11 +315,6 b' class IPythonApp(Application):' | |||||
313 | # By default always interact by starting the IPython mainloop. |
|
315 | # By default always interact by starting the IPython mainloop. | |
314 | self.default_config.Global.interact = True |
|
316 | self.default_config.Global.interact = True | |
315 |
|
317 | |||
316 | # Let the parent class set the default, but each time log_level |
|
|||
317 | # changes from config, we need to update self.log_level as that is |
|
|||
318 | # what updates the actual log level in self.log. |
|
|||
319 | self.default_config.Global.log_level = self.log_level |
|
|||
320 |
|
||||
321 | # No GUI integration by default |
|
318 | # No GUI integration by default | |
322 | self.default_config.Global.wthread = False |
|
319 | self.default_config.Global.wthread = False | |
323 | self.default_config.Global.q4thread = False |
|
320 | self.default_config.Global.q4thread = False | |
@@ -326,8 +323,9 b' class IPythonApp(Application):' | |||||
326 | def create_command_line_config(self): |
|
323 | def create_command_line_config(self): | |
327 | """Create and return a command line config loader.""" |
|
324 | """Create and return a command line config loader.""" | |
328 | return IPythonAppCLConfigLoader( |
|
325 | return IPythonAppCLConfigLoader( | |
329 |
description= |
|
326 | description=self.description, | |
330 |
version=release.version |
|
327 | version=release.version | |
|
328 | ) | |||
331 |
|
329 | |||
332 | def post_load_command_line_config(self): |
|
330 | def post_load_command_line_config(self): | |
333 | """Do actions after loading cl config.""" |
|
331 | """Do actions after loading cl config.""" | |
@@ -477,9 +475,9 b' class IPythonApp(Application):' | |||||
477 | self.shell.showtraceback() |
|
475 | self.shell.showtraceback() | |
478 |
|
476 | |||
479 | def _exec_file(self, fname): |
|
477 | def _exec_file(self, fname): | |
480 | full_filename = filefind(fname, ['.', self.ipythondir]) |
|
478 | full_filename = filefind(fname, [u'.', self.ipython_dir]) | |
481 | if os.path.isfile(full_filename): |
|
479 | if os.path.isfile(full_filename): | |
482 | if full_filename.endswith('.py'): |
|
480 | if full_filename.endswith(u'.py'): | |
483 | self.log.info("Running file in user namespace: %s" % full_filename) |
|
481 | self.log.info("Running file in user namespace: %s" % full_filename) | |
484 | self.shell.safe_execfile(full_filename, self.shell.user_ns) |
|
482 | self.shell.safe_execfile(full_filename, self.shell.user_ns) | |
485 | elif full_filename.endswith('.ipy'): |
|
483 | elif full_filename.endswith('.ipy'): | |
@@ -527,20 +525,20 b' class IPythonApp(Application):' | |||||
527 | self.shell.mainloop() |
|
525 | self.shell.mainloop() | |
528 |
|
526 | |||
529 |
|
527 | |||
530 | def load_default_config(ipythondir=None): |
|
528 | def load_default_config(ipython_dir=None): | |
531 | """Load the default config file from the default ipythondir. |
|
529 | """Load the default config file from the default ipython_dir. | |
532 |
|
530 | |||
533 | This is useful for embedded shells. |
|
531 | This is useful for embedded shells. | |
534 | """ |
|
532 | """ | |
535 | if ipythondir is None: |
|
533 | if ipython_dir is None: | |
536 | ipythondir = get_ipython_dir() |
|
534 | ipython_dir = get_ipython_dir() | |
537 |
cl = PyFileConfigLoader( |
|
535 | cl = PyFileConfigLoader(default_config_file_name, ipython_dir) | |
538 | config = cl.load_config() |
|
536 | config = cl.load_config() | |
539 | return config |
|
537 | return config | |
540 |
|
538 | |||
541 |
|
539 | |||
542 | def launch_new_instance(): |
|
540 | def launch_new_instance(): | |
543 | """Create a run a full blown IPython instance""" |
|
541 | """Create and run a full blown IPython instance""" | |
544 | app = IPythonApp() |
|
542 | app = IPythonApp() | |
545 | app.start() |
|
543 | app.start() | |
546 |
|
544 |
@@ -162,6 +162,15 b' def get_default_editor():' | |||||
162 | return ed |
|
162 | return ed | |
163 |
|
163 | |||
164 |
|
164 | |||
|
165 | def get_default_colors(): | |||
|
166 | if sys.platform=='darwin': | |||
|
167 | return "LightBG" | |||
|
168 | elif os.name=='nt': | |||
|
169 | return 'Linux' | |||
|
170 | else: | |||
|
171 | return 'Linux' | |||
|
172 | ||||
|
173 | ||||
165 | class SeparateStr(Str): |
|
174 | class SeparateStr(Str): | |
166 | """A Str subclass to validate separate_in, separate_out, etc. |
|
175 | """A Str subclass to validate separate_in, separate_out, etc. | |
167 |
|
176 | |||
@@ -182,7 +191,7 b' class SeparateStr(Str):' | |||||
182 | class InteractiveShell(Component, Magic): |
|
191 | class InteractiveShell(Component, Magic): | |
183 | """An enhanced, interactive shell for Python.""" |
|
192 | """An enhanced, interactive shell for Python.""" | |
184 |
|
193 | |||
185 | autocall = Enum((0,1,2), config=True) |
|
194 | autocall = Enum((0,1,2), default_value=1, config=True) | |
186 | autoedit_syntax = CBool(False, config=True) |
|
195 | autoedit_syntax = CBool(False, config=True) | |
187 | autoindent = CBool(True, config=True) |
|
196 | autoindent = CBool(True, config=True) | |
188 | automagic = CBool(True, config=True) |
|
197 | automagic = CBool(True, config=True) | |
@@ -192,7 +201,7 b' class InteractiveShell(Component, Magic):' | |||||
192 | cache_size = Int(1000, config=True) |
|
201 | cache_size = Int(1000, config=True) | |
193 | color_info = CBool(True, config=True) |
|
202 | color_info = CBool(True, config=True) | |
194 | colors = CaselessStrEnum(('NoColor','LightBG','Linux'), |
|
203 | colors = CaselessStrEnum(('NoColor','LightBG','Linux'), | |
195 |
default_value= |
|
204 | default_value=get_default_colors(), config=True) | |
196 | confirm_exit = CBool(True, config=True) |
|
205 | confirm_exit = CBool(True, config=True) | |
197 | debug = CBool(False, config=True) |
|
206 | debug = CBool(False, config=True) | |
198 | deep_reload = CBool(False, config=True) |
|
207 | deep_reload = CBool(False, config=True) | |
@@ -206,7 +215,7 b' class InteractiveShell(Component, Magic):' | |||||
206 | embedded_active = CBool(False) |
|
215 | embedded_active = CBool(False) | |
207 | editor = Str(get_default_editor(), config=True) |
|
216 | editor = Str(get_default_editor(), config=True) | |
208 | filename = Str("<ipython console>") |
|
217 | filename = Str("<ipython console>") | |
209 | ipythondir= Unicode('', config=True) # Set to get_ipython_dir() in __init__ |
|
218 | ipython_dir= Unicode('', config=True) # Set to get_ipython_dir() in __init__ | |
210 | logstart = CBool(False, config=True) |
|
219 | logstart = CBool(False, config=True) | |
211 | logfile = Str('', config=True) |
|
220 | logfile = Str('', config=True) | |
212 | logappend = Str('', config=True) |
|
221 | logappend = Str('', config=True) | |
@@ -264,7 +273,7 b' class InteractiveShell(Component, Magic):' | |||||
264 | # Subclasses with thread support should override this as needed. |
|
273 | # Subclasses with thread support should override this as needed. | |
265 | isthreaded = False |
|
274 | isthreaded = False | |
266 |
|
275 | |||
267 | def __init__(self, parent=None, config=None, ipythondir=None, usage=None, |
|
276 | def __init__(self, parent=None, config=None, ipython_dir=None, usage=None, | |
268 | user_ns=None, user_global_ns=None, |
|
277 | user_ns=None, user_global_ns=None, | |
269 | banner1=None, banner2=None, display_banner=None, |
|
278 | banner1=None, banner2=None, display_banner=None, | |
270 | custom_exceptions=((),None)): |
|
279 | custom_exceptions=((),None)): | |
@@ -274,7 +283,7 b' class InteractiveShell(Component, Magic):' | |||||
274 | super(InteractiveShell, self).__init__(parent, config=config) |
|
283 | super(InteractiveShell, self).__init__(parent, config=config) | |
275 |
|
284 | |||
276 | # These are relatively independent and stateless |
|
285 | # These are relatively independent and stateless | |
277 | self.init_ipythondir(ipythondir) |
|
286 | self.init_ipython_dir(ipython_dir) | |
278 | self.init_instance_attrs() |
|
287 | self.init_instance_attrs() | |
279 | self.init_term_title() |
|
288 | self.init_term_title() | |
280 | self.init_usage(usage) |
|
289 | self.init_usage(usage) | |
@@ -332,7 +341,7 b' class InteractiveShell(Component, Magic):' | |||||
332 | def _banner2_changed(self): |
|
341 | def _banner2_changed(self): | |
333 | self.compute_banner() |
|
342 | self.compute_banner() | |
334 |
|
343 | |||
335 | def _ipythondir_changed(self, name, new): |
|
344 | def _ipython_dir_changed(self, name, new): | |
336 | if not os.path.isdir(new): |
|
345 | if not os.path.isdir(new): | |
337 | os.makedirs(new, mode = 0777) |
|
346 | os.makedirs(new, mode = 0777) | |
338 | if not os.path.isdir(self.ipython_extension_dir): |
|
347 | if not os.path.isdir(self.ipython_extension_dir): | |
@@ -340,7 +349,7 b' class InteractiveShell(Component, Magic):' | |||||
340 |
|
349 | |||
341 | @property |
|
350 | @property | |
342 | def ipython_extension_dir(self): |
|
351 | def ipython_extension_dir(self): | |
343 | return os.path.join(self.ipythondir, 'extensions') |
|
352 | return os.path.join(self.ipython_dir, 'extensions') | |
344 |
|
353 | |||
345 | @property |
|
354 | @property | |
346 | def usable_screen_length(self): |
|
355 | def usable_screen_length(self): | |
@@ -372,19 +381,19 b' class InteractiveShell(Component, Magic):' | |||||
372 | # init_* methods called by __init__ |
|
381 | # init_* methods called by __init__ | |
373 | #------------------------------------------------------------------------- |
|
382 | #------------------------------------------------------------------------- | |
374 |
|
383 | |||
375 | def init_ipythondir(self, ipythondir): |
|
384 | def init_ipython_dir(self, ipython_dir): | |
376 | if ipythondir is not None: |
|
385 | if ipython_dir is not None: | |
377 | self.ipythondir = ipythondir |
|
386 | self.ipython_dir = ipython_dir | |
378 | self.config.Global.ipythondir = self.ipythondir |
|
387 | self.config.Global.ipython_dir = self.ipython_dir | |
379 | return |
|
388 | return | |
380 |
|
389 | |||
381 | if hasattr(self.config.Global, 'ipythondir'): |
|
390 | if hasattr(self.config.Global, 'ipython_dir'): | |
382 | self.ipythondir = self.config.Global.ipythondir |
|
391 | self.ipython_dir = self.config.Global.ipython_dir | |
383 | else: |
|
392 | else: | |
384 | self.ipythondir = get_ipython_dir() |
|
393 | self.ipython_dir = get_ipython_dir() | |
385 |
|
394 | |||
386 | # All children can just read this |
|
395 | # All children can just read this | |
387 | self.config.Global.ipythondir = self.ipythondir |
|
396 | self.config.Global.ipython_dir = self.ipython_dir | |
388 |
|
397 | |||
389 | def init_instance_attrs(self): |
|
398 | def init_instance_attrs(self): | |
390 | self.jobs = BackgroundJobManager() |
|
399 | self.jobs = BackgroundJobManager() | |
@@ -1070,7 +1079,7 b' class InteractiveShell(Component, Magic):' | |||||
1070 | histfname = 'history-%s' % self.profile |
|
1079 | histfname = 'history-%s' % self.profile | |
1071 | else: |
|
1080 | else: | |
1072 | histfname = 'history' |
|
1081 | histfname = 'history' | |
1073 | self.histfile = os.path.join(self.ipythondir, histfname) |
|
1082 | self.histfile = os.path.join(self.ipython_dir, histfname) | |
1074 |
|
1083 | |||
1075 | # Fill the history zero entry, user counter starts at 1 |
|
1084 | # Fill the history zero entry, user counter starts at 1 | |
1076 | self.input_hist.append('\n') |
|
1085 | self.input_hist.append('\n') | |
@@ -1078,12 +1087,12 b' class InteractiveShell(Component, Magic):' | |||||
1078 |
|
1087 | |||
1079 | def init_shadow_hist(self): |
|
1088 | def init_shadow_hist(self): | |
1080 | try: |
|
1089 | try: | |
1081 | self.db = pickleshare.PickleShareDB(self.ipythondir + "/db") |
|
1090 | self.db = pickleshare.PickleShareDB(self.ipython_dir + "/db") | |
1082 | except exceptions.UnicodeDecodeError: |
|
1091 | except exceptions.UnicodeDecodeError: | |
1083 | print "Your ipythondir can't be decoded to unicode!" |
|
1092 | print "Your ipython_dir can't be decoded to unicode!" | |
1084 | print "Please set HOME environment variable to something that" |
|
1093 | print "Please set HOME environment variable to something that" | |
1085 | print r"only has ASCII characters, e.g. c:\home" |
|
1094 | print r"only has ASCII characters, e.g. c:\home" | |
1086 | print "Now it is", self.ipythondir |
|
1095 | print "Now it is", self.ipython_dir | |
1087 | sys.exit() |
|
1096 | sys.exit() | |
1088 | self.shadowhist = ipcorehist.ShadowHist(self.db) |
|
1097 | self.shadowhist = ipcorehist.ShadowHist(self.db) | |
1089 |
|
1098 | |||
@@ -1426,9 +1435,7 b' class InteractiveShell(Component, Magic):' | |||||
1426 | return outcomps |
|
1435 | return outcomps | |
1427 |
|
1436 | |||
1428 | def set_custom_completer(self,completer,pos=0): |
|
1437 | def set_custom_completer(self,completer,pos=0): | |
1429 |
""" |
|
1438 | """Adds a new custom completer function. | |
1430 |
|
||||
1431 | Adds a new custom completer function. |
|
|||
1432 |
|
1439 | |||
1433 | The position argument (defaults to 0) is the index in the completers |
|
1440 | The position argument (defaults to 0) is the index in the completers | |
1434 | list where you want the completer to be inserted.""" |
|
1441 | list where you want the completer to be inserted.""" | |
@@ -1438,9 +1445,18 b' class InteractiveShell(Component, Magic):' | |||||
1438 | self.Completer.matchers.insert(pos,newcomp) |
|
1445 | self.Completer.matchers.insert(pos,newcomp) | |
1439 |
|
1446 | |||
1440 | def set_completer(self): |
|
1447 | def set_completer(self): | |
1441 |
""" |
|
1448 | """Reset readline's completer to be our own.""" | |
1442 | self.readline.set_completer(self.Completer.complete) |
|
1449 | self.readline.set_completer(self.Completer.complete) | |
1443 |
|
1450 | |||
|
1451 | def set_completer_frame(self, frame=None): | |||
|
1452 | """Set the frame of the completer.""" | |||
|
1453 | if frame: | |||
|
1454 | self.Completer.namespace = frame.f_locals | |||
|
1455 | self.Completer.global_namespace = frame.f_globals | |||
|
1456 | else: | |||
|
1457 | self.Completer.namespace = self.user_ns | |||
|
1458 | self.Completer.global_namespace = self.user_global_ns | |||
|
1459 | ||||
1444 | #------------------------------------------------------------------------- |
|
1460 | #------------------------------------------------------------------------- | |
1445 | # Things related to readline |
|
1461 | # Things related to readline | |
1446 | #------------------------------------------------------------------------- |
|
1462 | #------------------------------------------------------------------------- | |
@@ -1913,7 +1929,7 b' class InteractiveShell(Component, Magic):' | |||||
1913 | # SystemExit exception changed between Python 2.4 and 2.5, so |
|
1929 | # SystemExit exception changed between Python 2.4 and 2.5, so | |
1914 | # the checks must be done in a version-dependent way. |
|
1930 | # the checks must be done in a version-dependent way. | |
1915 | show = False |
|
1931 | show = False | |
1916 |
if status. |
|
1932 | if status.args[0]==0 and not kw['exit_ignore']: | |
1917 | show = True |
|
1933 | show = True | |
1918 | if show: |
|
1934 | if show: | |
1919 | self.showtraceback() |
|
1935 | self.showtraceback() | |
@@ -2278,6 +2294,8 b' class InteractiveShell(Component, Magic):' | |||||
2278 | def get_component(self, name=None, klass=None): |
|
2294 | def get_component(self, name=None, klass=None): | |
2279 | """Fetch a component by name and klass in my tree.""" |
|
2295 | """Fetch a component by name and klass in my tree.""" | |
2280 | c = Component.get_instances(root=self, name=name, klass=klass) |
|
2296 | c = Component.get_instances(root=self, name=name, klass=klass) | |
|
2297 | if len(c) == 0: | |||
|
2298 | return None | |||
2281 | if len(c) == 1: |
|
2299 | if len(c) == 1: | |
2282 | return c[0] |
|
2300 | return c[0] | |
2283 | else: |
|
2301 | else: | |
@@ -2309,7 +2327,7 b' class InteractiveShell(Component, Magic):' | |||||
2309 | You can put your extension modules anywhere you want, as long as |
|
2327 | You can put your extension modules anywhere you want, as long as | |
2310 | they can be imported by Python's standard import mechanism. However, |
|
2328 | they can be imported by Python's standard import mechanism. However, | |
2311 | to make it easy to write extensions, you can also put your extensions |
|
2329 | to make it easy to write extensions, you can also put your extensions | |
2312 | in ``os.path.join(self.ipythondir, 'extensions')``. This directory |
|
2330 | in ``os.path.join(self.ipython_dir, 'extensions')``. This directory | |
2313 | is added to ``sys.path`` automatically. |
|
2331 | is added to ``sys.path`` automatically. | |
2314 | """ |
|
2332 | """ | |
2315 | from IPython.utils.syspathcontext import prepended_to_syspath |
|
2333 | from IPython.utils.syspathcontext import prepended_to_syspath |
@@ -21,6 +21,7 b' import os' | |||||
21 | import pdb |
|
21 | import pdb | |
22 | import pydoc |
|
22 | import pydoc | |
23 | import sys |
|
23 | import sys | |
|
24 | import shutil | |||
24 | import re |
|
25 | import re | |
25 | import tempfile |
|
26 | import tempfile | |
26 | import time |
|
27 | import time | |
@@ -1268,7 +1269,6 b' Currently the magic system has the following functions:\\n"""' | |||||
1268 | If you want IPython to automatically do this on every exception, see |
|
1269 | If you want IPython to automatically do this on every exception, see | |
1269 | the %pdb magic for more details. |
|
1270 | the %pdb magic for more details. | |
1270 | """ |
|
1271 | """ | |
1271 |
|
||||
1272 | self.shell.debugger(force=True) |
|
1272 | self.shell.debugger(force=True) | |
1273 |
|
1273 | |||
1274 | @testdec.skip_doctest |
|
1274 | @testdec.skip_doctest | |
@@ -3378,34 +3378,6 b' Defaulting color scheme to \'NoColor\'"""' | |||||
3378 | qr = IPython.core.usage.quick_reference + self.magic_magic('-brief') |
|
3378 | qr = IPython.core.usage.quick_reference + self.magic_magic('-brief') | |
3379 |
|
3379 | |||
3380 | page(qr) |
|
3380 | page(qr) | |
3381 |
|
||||
3382 | def magic_upgrade(self,arg): |
|
|||
3383 | """ Upgrade your IPython installation |
|
|||
3384 |
|
||||
3385 | This will copy the config files that don't yet exist in your |
|
|||
3386 | ipython dir from the system config dir. Use this after upgrading |
|
|||
3387 | IPython if you don't wish to delete your .ipython dir. |
|
|||
3388 |
|
||||
3389 | Call with -nolegacy to get rid of ipythonrc* files (recommended for |
|
|||
3390 | new users) |
|
|||
3391 |
|
||||
3392 | """ |
|
|||
3393 | ip = self.getapi() |
|
|||
3394 | ipinstallation = path(IPython.__file__).dirname() |
|
|||
3395 | upgrade_script = '%s "%s"' % (sys.executable,ipinstallation / 'utils' / 'upgradedir.py') |
|
|||
3396 | src_config = ipinstallation / 'config' / 'userconfig' |
|
|||
3397 | userdir = path(ip.config.IPYTHONDIR) |
|
|||
3398 | cmd = '%s "%s" "%s"' % (upgrade_script, src_config, userdir) |
|
|||
3399 | print ">",cmd |
|
|||
3400 | shell(cmd) |
|
|||
3401 | if arg == '-nolegacy': |
|
|||
3402 | legacy = userdir.files('ipythonrc*') |
|
|||
3403 | print "Nuking legacy files:",legacy |
|
|||
3404 |
|
||||
3405 | [p.remove() for p in legacy] |
|
|||
3406 | suffix = (sys.platform == 'win32' and '.ini' or '') |
|
|||
3407 | (userdir / ('ipythonrc' + suffix)).write_text('# Empty, see ipy_user_conf.py\n') |
|
|||
3408 |
|
||||
3409 |
|
3381 | |||
3410 | def magic_doctest_mode(self,parameter_s=''): |
|
3382 | def magic_doctest_mode(self,parameter_s=''): | |
3411 | """Toggle doctest mode on and off. |
|
3383 | """Toggle doctest mode on and off. | |
@@ -3550,4 +3522,59 b' Defaulting color scheme to \'NoColor\'"""' | |||||
3550 | """Reload an IPython extension by its module name.""" |
|
3522 | """Reload an IPython extension by its module name.""" | |
3551 | self.reload_extension(module_str) |
|
3523 | self.reload_extension(module_str) | |
3552 |
|
3524 | |||
|
3525 | def magic_install_profiles(self, s): | |||
|
3526 | """Install the default IPython profiles into the .ipython dir. | |||
|
3527 | ||||
|
3528 | If the default profiles have already been installed, they will not | |||
|
3529 | be overwritten. You can force overwriting them by using the ``-o`` | |||
|
3530 | option:: | |||
|
3531 | ||||
|
3532 | In [1]: %install_profiles -o | |||
|
3533 | """ | |||
|
3534 | if '-o' in s: | |||
|
3535 | overwrite = True | |||
|
3536 | else: | |||
|
3537 | overwrite = False | |||
|
3538 | from IPython.config import profile | |||
|
3539 | profile_dir = os.path.split(profile.__file__)[0] | |||
|
3540 | ipython_dir = self.ipython_dir | |||
|
3541 | files = os.listdir(profile_dir) | |||
|
3542 | ||||
|
3543 | to_install = [] | |||
|
3544 | for f in files: | |||
|
3545 | if f.startswith('ipython_config'): | |||
|
3546 | src = os.path.join(profile_dir, f) | |||
|
3547 | dst = os.path.join(ipython_dir, f) | |||
|
3548 | if (not os.path.isfile(dst)) or overwrite: | |||
|
3549 | to_install.append((f, src, dst)) | |||
|
3550 | if len(to_install)>0: | |||
|
3551 | print "Installing profiles to: ", ipython_dir | |||
|
3552 | for (f, src, dst) in to_install: | |||
|
3553 | shutil.copy(src, dst) | |||
|
3554 | print " %s" % f | |||
|
3555 | ||||
|
3556 | def magic_install_default_config(self, s): | |||
|
3557 | """Install IPython's default config file into the .ipython dir. | |||
|
3558 | ||||
|
3559 | If the default config file (:file:`ipython_config.py`) is already | |||
|
3560 | installed, it will not be overwritten. You can force overwriting | |||
|
3561 | by using the ``-o`` option:: | |||
|
3562 | ||||
|
3563 | In [1]: %install_default_config | |||
|
3564 | """ | |||
|
3565 | if '-o' in s: | |||
|
3566 | overwrite = True | |||
|
3567 | else: | |||
|
3568 | overwrite = False | |||
|
3569 | from IPython.config import default | |||
|
3570 | config_dir = os.path.split(default.__file__)[0] | |||
|
3571 | ipython_dir = self.ipython_dir | |||
|
3572 | default_config_file_name = 'ipython_config.py' | |||
|
3573 | src = os.path.join(config_dir, default_config_file_name) | |||
|
3574 | dst = os.path.join(ipython_dir, default_config_file_name) | |||
|
3575 | if (not os.path.isfile(dst)) or overwrite: | |||
|
3576 | shutil.copy(src, dst) | |||
|
3577 | print "Installing default config file: %s" % dst | |||
|
3578 | ||||
|
3579 | ||||
3553 | # end Magic |
|
3580 | # end Magic |
@@ -39,7 +39,7 b' from IPython.core.splitinput import split_user_input' | |||||
39 | from IPython.core.page import page |
|
39 | from IPython.core.page import page | |
40 |
|
40 | |||
41 | from IPython.utils.traitlets import List, Int, Any, Str, CBool, Bool |
|
41 | from IPython.utils.traitlets import List, Int, Any, Str, CBool, Bool | |
42 | from IPython.utils.genutils import make_quoted_expr |
|
42 | from IPython.utils.genutils import make_quoted_expr, Term | |
43 | from IPython.utils.autoattr import auto_attr |
|
43 | from IPython.utils.autoattr import auto_attr | |
44 |
|
44 | |||
45 | #----------------------------------------------------------------------------- |
|
45 | #----------------------------------------------------------------------------- |
@@ -15,7 +15,7 b' import nose.tools as nt' | |||||
15 | # our own packages |
|
15 | # our own packages | |
16 | from IPython.core import iplib |
|
16 | from IPython.core import iplib | |
17 | from IPython.core import ipapi |
|
17 | from IPython.core import ipapi | |
18 | from IPython.core.oldusersetup import user_setup |
|
18 | ||
19 |
|
19 | |||
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 | # Globals |
|
21 | # Globals | |
@@ -54,27 +54,4 b' def test_reset():' | |||||
54 | continue |
|
54 | continue | |
55 | nt.assert_equals(len(ns),0) |
|
55 | nt.assert_equals(len(ns),0) | |
56 |
|
56 | |||
57 |
|
||||
58 | # make sure that user_setup can be run re-entrantly in 'install' mode. |
|
|||
59 | def test_user_setup(): |
|
|||
60 | # use a lambda to pass kwargs to the generator |
|
|||
61 | user_setup = lambda a,k: user_setup(*a,**k) |
|
|||
62 | kw = dict(mode='install', interactive=False) |
|
|||
63 |
|
||||
64 | # Call the user setup and verify that the directory exists |
|
|||
65 | yield user_setup, (ip.config.IPYTHONDIR,''), kw |
|
|||
66 | yield os.path.isdir, ip.config.IPYTHONDIR |
|
|||
67 |
|
||||
68 | # Now repeat the operation with a non-existent directory. Check both that |
|
|||
69 | # the call succeeds and that the directory is created. |
|
|||
70 | tmpdir = tempfile.mktemp(prefix='ipython-test-') |
|
|||
71 | # Use a try with an empty except because try/finally doesn't work with a |
|
|||
72 | # yield in Python 2.4. |
|
|||
73 | try: |
|
|||
74 | yield user_setup, (tmpdir,''), kw |
|
|||
75 | yield os.path.isdir, tmpdir |
|
|||
76 | except: |
|
|||
77 | pass |
|
|||
78 | # Clean up the temp dir once done |
|
|||
79 | shutil.rmtree(tmpdir) |
|
|||
80 | No newline at end of file |
|
57 |
@@ -40,7 +40,7 b' USAGE' | |||||
40 | in directories. |
|
40 | in directories. | |
41 |
|
41 | |||
42 | In the rest of this text, we will refer to this directory as |
|
42 | In the rest of this text, we will refer to this directory as | |
43 | IPYTHONDIR. |
|
43 | IPYTHON_DIR. | |
44 |
|
44 | |||
45 | REGULAR OPTIONS |
|
45 | REGULAR OPTIONS | |
46 | After the above threading options have been given, regular options can |
|
46 | After the above threading options have been given, regular options can | |
@@ -150,9 +150,9 b' REGULAR OPTIONS' | |||||
150 | here (in case your default EDITOR is something like Emacs). |
|
150 | here (in case your default EDITOR is something like Emacs). | |
151 |
|
151 | |||
152 | -ipythondir <name> |
|
152 | -ipythondir <name> | |
153 | The name of your IPython configuration directory IPYTHONDIR. |
|
153 | The name of your IPython configuration directory IPYTHON_DIR. | |
154 | This can also be specified through the environment variable |
|
154 | This can also be specified through the environment variable | |
155 | IPYTHONDIR. |
|
155 | IPYTHON_DIR. | |
156 |
|
156 | |||
157 | -log|l Generate a log file of all input. The file is named |
|
157 | -log|l Generate a log file of all input. The file is named | |
158 | ipython_log.py in your current directory (which prevents logs |
|
158 | ipython_log.py in your current directory (which prevents logs | |
@@ -201,10 +201,10 b' REGULAR OPTIONS' | |||||
201 |
|
201 | |||
202 | -profile|p <name> |
|
202 | -profile|p <name> | |
203 | Assume that your config file is ipythonrc-<name> (looks in cur- |
|
203 | Assume that your config file is ipythonrc-<name> (looks in cur- | |
204 | rent dir first, then in IPYTHONDIR). This is a quick way to keep |
|
204 | rent dir first, then in IPYTHON_DIR). This is a quick way to keep | |
205 | and load multiple config files for different tasks, especially |
|
205 | and load multiple config files for different tasks, especially | |
206 | if you use the include option of config files. You can keep a |
|
206 | if you use the include option of config files. You can keep a | |
207 | basic IPYTHONDIR/ipythonrc file and then have other 'profiles' |
|
207 | basic IPYTHON_DIR/ipythonrc file and then have other 'profiles' | |
208 | which include this one and load extra things for particular |
|
208 | which include this one and load extra things for particular | |
209 | tasks. For example: |
|
209 | tasks. For example: | |
210 |
|
210 | |||
@@ -245,7 +245,7 b' REGULAR OPTIONS' | |||||
245 | -rcfile <name> |
|
245 | -rcfile <name> | |
246 | Name of your IPython resource configuration file. normally |
|
246 | Name of your IPython resource configuration file. normally | |
247 | IPython loads ipythonrc (from current directory) or |
|
247 | IPython loads ipythonrc (from current directory) or | |
248 | IPYTHONDIR/ipythonrc. If the loading of your config file fails, |
|
248 | IPYTHON_DIR/ipythonrc. If the loading of your config file fails, | |
249 | IPython starts with a bare bones configuration (no modules |
|
249 | IPython starts with a bare bones configuration (no modules | |
250 | loaded at all). |
|
250 | loaded at all). | |
251 |
|
251 | |||
@@ -284,7 +284,7 b' REGULAR OPTIONS' | |||||
284 | Simply removes all input/output separators. |
|
284 | Simply removes all input/output separators. | |
285 |
|
285 | |||
286 | -upgrade |
|
286 | -upgrade | |
287 | Allows you to upgrade your IPYTHONDIR configuration when you |
|
287 | Allows you to upgrade your IPYTHON_DIR configuration when you | |
288 | install a new version of IPython. Since new versions may |
|
288 | install a new version of IPython. Since new versions may | |
289 | include new command lines options or example files, this copies |
|
289 | include new command lines options or example files, this copies | |
290 | updated ipythonrc-type files. However, it backs up (with a .old |
|
290 | updated ipythonrc-type files. However, it backs up (with a .old |
@@ -6,11 +6,10 b' ipython.' | |||||
6 | try: |
|
6 | try: | |
7 | import wx |
|
7 | import wx | |
8 | except ImportError, e: |
|
8 | except ImportError, e: | |
9 |
e. |
|
9 | e.args[0] = """%s | |
10 | ________________________________________________________________________________ |
|
10 | ________________________________________________________________________________ | |
11 | You need wxPython to run this application. |
|
11 | You need wxPython to run this application. | |
12 |
""" % e. |
|
12 | """ % e.args[0] | |
13 | e.args = (e.message, ) + e.args[1:] |
|
|||
14 | raise e |
|
13 | raise e | |
15 |
|
14 | |||
16 | from wx_frontend import WxController |
|
15 | from wx_frontend import WxController |
@@ -109,7 +109,7 b' class MyFrame(wx.Frame):' | |||||
109 |
|
109 | |||
110 | def optionSave(self, name, value): |
|
110 | def optionSave(self, name, value): | |
111 | ip = get() |
|
111 | ip = get() | |
112 |
path = ip. |
|
112 | path = ip.ipython_dir | |
113 | opt = open(path + '/options.conf','w') |
|
113 | opt = open(path + '/options.conf','w') | |
114 |
|
114 | |||
115 | try: |
|
115 | try: | |
@@ -126,7 +126,7 b' class MyFrame(wx.Frame):' | |||||
126 | def optionLoad(self): |
|
126 | def optionLoad(self): | |
127 | try: |
|
127 | try: | |
128 | ip = get() |
|
128 | ip = get() | |
129 |
path = ip. |
|
129 | path = ip.ipython_dir | |
130 | opt = open(path + '/options.conf','r') |
|
130 | opt = open(path + '/options.conf','r') | |
131 | lines = opt.readlines() |
|
131 | lines = opt.readlines() | |
132 | opt.close() |
|
132 | opt.close() |
@@ -1,3 +1,4 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
2 |
|
3 | |||
3 | """Asynchronous clients for the IPython controller. |
|
4 | """Asynchronous clients for the IPython controller. | |
@@ -9,32 +10,32 b' deferreds to the result.' | |||||
9 |
|
10 | |||
10 | The main methods are are `get_*_client` and `get_client`. |
|
11 | The main methods are are `get_*_client` and `get_client`. | |
11 | """ |
|
12 | """ | |
12 |
|
13 | #----------------------------------------------------------------------------- | ||
13 | __docformat__ = "restructuredtext en" |
|
14 | # Copyright (C) 2008-2009 The IPython Development Team | |
14 |
|
||||
15 | #------------------------------------------------------------------------------- |
|
|||
16 | # Copyright (C) 2008 The IPython Development Team |
|
|||
17 | # |
|
15 | # | |
18 | # Distributed under the terms of the BSD License. The full license is in |
|
16 | # Distributed under the terms of the BSD License. The full license is in | |
19 | # the file COPYING, distributed as part of this software. |
|
17 | # the file COPYING, distributed as part of this software. | |
20 |
#----------------------------------------------------------------------------- |
|
18 | #----------------------------------------------------------------------------- | |
21 |
|
19 | |||
22 |
#----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
23 | # Imports |
|
21 | # Imports | |
24 |
#----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
25 |
|
23 | |||
26 | from IPython.kernel import codeutil |
|
24 | from IPython.kernel import codeutil | |
27 |
from IPython.kernel.clientconnector import |
|
25 | from IPython.kernel.clientconnector import ( | |
|
26 | AsyncClientConnector, | |||
|
27 | AsyncCluster | |||
|
28 | ) | |||
28 |
|
29 | |||
29 | # Other things that the user will need |
|
30 | # Other things that the user will need | |
30 | from IPython.kernel.task import MapTask, StringTask |
|
31 | from IPython.kernel.task import MapTask, StringTask | |
31 | from IPython.kernel.error import CompositeError |
|
32 | from IPython.kernel.error import CompositeError | |
32 |
|
33 | |||
33 |
#----------------------------------------------------------------------------- |
|
34 | #----------------------------------------------------------------------------- | |
34 | # Code |
|
35 | # Code | |
35 |
#----------------------------------------------------------------------------- |
|
36 | #----------------------------------------------------------------------------- | |
36 |
|
37 | |||
37 | _client_tub = ClientConnector() |
|
38 | _client_tub = AsyncClientConnector() | |
38 | get_multiengine_client = _client_tub.get_multiengine_client |
|
39 | get_multiengine_client = _client_tub.get_multiengine_client | |
39 | get_task_client = _client_tub.get_task_client |
|
40 | get_task_client = _client_tub.get_task_client | |
40 | get_client = _client_tub.get_client |
|
41 | get_client = _client_tub.get_client |
@@ -1,3 +1,4 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
2 |
|
3 | |||
3 | """This module contains blocking clients for the controller interfaces. |
|
4 | """This module contains blocking clients for the controller interfaces. | |
@@ -15,33 +16,36 b' The main classes in this module are:' | |||||
15 | * CompositeError |
|
16 | * CompositeError | |
16 | """ |
|
17 | """ | |
17 |
|
18 | |||
18 | __docformat__ = "restructuredtext en" |
|
19 | #----------------------------------------------------------------------------- | |
19 |
|
20 | # Copyright (C) 2008-2009 The IPython Development Team | ||
20 | #------------------------------------------------------------------------------- |
|
|||
21 | # Copyright (C) 2008 The IPython Development Team |
|
|||
22 | # |
|
21 | # | |
23 | # Distributed under the terms of the BSD License. The full license is in |
|
22 | # Distributed under the terms of the BSD License. The full license is in | |
24 | # the file COPYING, distributed as part of this software. |
|
23 | # the file COPYING, distributed as part of this software. | |
25 |
#----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
26 |
|
25 | |||
27 |
#----------------------------------------------------------------------------- |
|
26 | #----------------------------------------------------------------------------- | |
28 | # Imports |
|
27 | # Imports | |
29 |
#----------------------------------------------------------------------------- |
|
28 | #----------------------------------------------------------------------------- | |
30 |
|
29 | |||
|
30 | from cStringIO import StringIO | |||
31 | import sys |
|
31 | import sys | |
|
32 | import warnings | |||
32 |
|
33 | |||
33 | # from IPython.utils import growl |
|
34 | # from IPython.utils import growl | |
34 | # growl.start("IPython1 Client") |
|
35 | # growl.start("IPython1 Client") | |
35 |
|
36 | |||
36 |
|
37 | |||
37 | from twisted.internet import reactor |
|
38 | from twisted.internet import reactor | |
38 | from IPython.kernel.clientconnector import ClientConnector |
|
39 | from twisted.internet.error import PotentialZombieWarning | |
|
40 | from twisted.python import log | |||
|
41 | ||||
|
42 | from IPython.kernel.clientconnector import ClientConnector, Cluster | |||
39 | from IPython.kernel.twistedutil import ReactorInThread |
|
43 | from IPython.kernel.twistedutil import ReactorInThread | |
40 | from IPython.kernel.twistedutil import blockingCallFromThread |
|
44 | from IPython.kernel.twistedutil import blockingCallFromThread | |
41 |
|
45 | |||
42 | # These enable various things |
|
46 | # These enable various things | |
43 | from IPython.kernel import codeutil |
|
47 | from IPython.kernel import codeutil | |
44 | import IPython.kernel.magic |
|
48 | # import IPython.kernel.magic | |
45 |
|
49 | |||
46 | # Other things that the user will need |
|
50 | # Other things that the user will need | |
47 | from IPython.kernel.task import MapTask, StringTask |
|
51 | from IPython.kernel.task import MapTask, StringTask | |
@@ -51,46 +55,34 b' from IPython.kernel.error import CompositeError' | |||||
51 | # Code |
|
55 | # Code | |
52 | #------------------------------------------------------------------------------- |
|
56 | #------------------------------------------------------------------------------- | |
53 |
|
57 | |||
54 | _client_tub = ClientConnector() |
|
58 | warnings.simplefilter('ignore', PotentialZombieWarning) | |
55 |
|
||||
56 |
|
||||
57 | def get_multiengine_client(furl_or_file=''): |
|
|||
58 | """Get the blocking MultiEngine client. |
|
|||
59 |
|
||||
60 | :Parameters: |
|
|||
61 | furl_or_file : str |
|
|||
62 | A furl or a filename containing a furl. If empty, the |
|
|||
63 | default furl_file will be used |
|
|||
64 |
|
||||
65 | :Returns: |
|
|||
66 | The connected MultiEngineClient instance |
|
|||
67 | """ |
|
|||
68 | client = blockingCallFromThread(_client_tub.get_multiengine_client, |
|
|||
69 | furl_or_file) |
|
|||
70 | return client.adapt_to_blocking_client() |
|
|||
71 |
|
||||
72 | def get_task_client(furl_or_file=''): |
|
|||
73 | """Get the blocking Task client. |
|
|||
74 |
|
||||
75 | :Parameters: |
|
|||
76 | furl_or_file : str |
|
|||
77 | A furl or a filename containing a furl. If empty, the |
|
|||
78 | default furl_file will be used |
|
|||
79 |
|
||||
80 | :Returns: |
|
|||
81 | The connected TaskClient instance |
|
|||
82 | """ |
|
|||
83 | client = blockingCallFromThread(_client_tub.get_task_client, |
|
|||
84 | furl_or_file) |
|
|||
85 | return client.adapt_to_blocking_client() |
|
|||
86 |
|
59 | |||
|
60 | _client_tub = ClientConnector() | |||
87 |
|
61 | |||
|
62 | get_multiengine_client = _client_tub.get_multiengine_client | |||
|
63 | get_task_client = _client_tub.get_task_client | |||
88 | MultiEngineClient = get_multiengine_client |
|
64 | MultiEngineClient = get_multiengine_client | |
89 | TaskClient = get_task_client |
|
65 | TaskClient = get_task_client | |
90 |
|
66 | |||
91 |
|
67 | # This isn't great. I should probably set this up in the ReactorInThread | ||
|
68 | # class below. But, it does work for now. | |||
|
69 | log.startLogging(sys.stdout, setStdout=0) | |||
92 |
|
70 | |||
93 | # Now we start the reactor in a thread |
|
71 | # Now we start the reactor in a thread | |
94 | rit = ReactorInThread() |
|
72 | rit = ReactorInThread() | |
95 | rit.setDaemon(True) |
|
73 | rit.setDaemon(True) | |
96 | rit.start() No newline at end of file |
|
74 | rit.start() | |
|
75 | ||||
|
76 | ||||
|
77 | ||||
|
78 | ||||
|
79 | __all__ = [ | |||
|
80 | 'MapTask', | |||
|
81 | 'StringTask', | |||
|
82 | 'MultiEngineClient', | |||
|
83 | 'TaskClient', | |||
|
84 | 'CompositeError', | |||
|
85 | 'get_task_client', | |||
|
86 | 'get_multiengine_client', | |||
|
87 | 'Cluster' | |||
|
88 | ] |
This diff has been collapsed as it changes many lines, (815 lines changed) Show them Hide them | |||||
@@ -1,142 +1,268 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
2 |
|
3 | |||
3 |
""" |
|
4 | """Facilities for handling client connections to the controller.""" | |
4 |
|
5 | |||
5 | __docformat__ = "restructuredtext en" |
|
6 | #----------------------------------------------------------------------------- | |
6 |
|
7 | # Copyright (C) 2008-2009 The IPython Development Team | ||
7 | #------------------------------------------------------------------------------- |
|
|||
8 | # Copyright (C) 2008 The IPython Development Team |
|
|||
9 | # |
|
8 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
12 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
13 |
|
12 | |||
14 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
14 | # Imports | |
16 |
#----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
17 |
|
||||
18 | from twisted.internet import defer |
|
|||
19 |
|
16 | |||
20 | from IPython.kernel.fcutil import Tub, UnauthenticatedTub |
|
17 | from __future__ import with_statement | |
|
18 | import os | |||
21 |
|
19 | |||
22 | from IPython.kernel.config import config_manager as kernel_config_manager |
|
20 | from IPython.kernel.fcutil import ( | |
|
21 | Tub, | |||
|
22 | find_furl, | |||
|
23 | is_valid_furl_or_file, | |||
|
24 | validate_furl_or_file, | |||
|
25 | FURLError | |||
|
26 | ) | |||
|
27 | from IPython.kernel.clusterdir import ClusterDir, ClusterDirError | |||
|
28 | from IPython.kernel.launcher import IPClusterLauncher | |||
|
29 | from IPython.kernel.twistedutil import ( | |||
|
30 | gatherBoth, | |||
|
31 | make_deferred, | |||
|
32 | blockingCallFromThread, | |||
|
33 | sleep_deferred | |||
|
34 | ) | |||
23 | from IPython.utils.importstring import import_item |
|
35 | from IPython.utils.importstring import import_item | |
24 |
from IPython. |
|
36 | from IPython.utils.genutils import get_ipython_dir | |
25 |
|
37 | |||
26 | co = kernel_config_manager.get_config_obj() |
|
38 | from twisted.internet import defer | |
27 | client_co = co['client'] |
|
39 | from twisted.internet.defer import inlineCallbacks, returnValue | |
|
40 | from twisted.python import failure, log | |||
28 |
|
41 | |||
29 |
#----------------------------------------------------------------------------- |
|
42 | #----------------------------------------------------------------------------- | |
30 | # The ClientConnector class |
|
43 | # The ClientConnector class | |
31 |
#----------------------------------------------------------------------------- |
|
44 | #----------------------------------------------------------------------------- | |
32 |
|
45 | |||
33 | class ClientConnector(object): |
|
46 | DELAY = 0.2 | |
34 | """ |
|
47 | MAX_TRIES = 9 | |
35 | This class gets remote references from furls and returns the wrapped clients. |
|
48 | ||
36 |
|
49 | |||
37 | This class is also used in `client.py` and `asyncclient.py` to create |
|
50 | class ClientConnectorError(Exception): | |
38 | a single per client-process Tub. |
|
51 | pass | |
|
52 | ||||
|
53 | ||||
|
54 | class AsyncClientConnector(object): | |||
|
55 | """A class for getting remote references and clients from furls. | |||
|
56 | ||||
|
57 | This start a single :class:`Tub` for all remote reference and caches | |||
|
58 | references. | |||
39 | """ |
|
59 | """ | |
40 |
|
60 | |||
41 | def __init__(self): |
|
61 | def __init__(self): | |
42 | self._remote_refs = {} |
|
62 | self._remote_refs = {} | |
43 | self.tub = Tub() |
|
63 | self.tub = Tub() | |
44 | self.tub.startService() |
|
64 | self.tub.startService() | |
45 |
|
65 | |||
46 | def get_reference(self, furl_or_file): |
|
66 | def _find_furl(self, profile='default', cluster_dir=None, | |
|
67 | furl_or_file=None, furl_file_name=None, | |||
|
68 | ipython_dir=None): | |||
|
69 | """Find a FURL file by profile+ipython_dir or cluster dir. | |||
|
70 | ||||
|
71 | This raises an :exc:`~IPython.kernel.fcutil.FURLError` exception | |||
|
72 | if a FURL file can't be found. | |||
47 |
|
|
73 | """ | |
48 | Get a remote reference using a furl or a file containing a furl. |
|
74 | # Try by furl_or_file | |
49 |
|
75 | if furl_or_file is not None: | ||
|
76 | validate_furl_or_file(furl_or_file) | |||
|
77 | return furl_or_file | |||
|
78 | ||||
|
79 | if furl_file_name is None: | |||
|
80 | raise FURLError('A furl_file_name must be provided') | |||
|
81 | ||||
|
82 | # Try by cluster_dir | |||
|
83 | if cluster_dir is not None: | |||
|
84 | cluster_dir_obj = ClusterDir.find_cluster_dir(cluster_dir) | |||
|
85 | sdir = cluster_dir_obj.security_dir | |||
|
86 | furl_file = os.path.join(sdir, furl_file_name) | |||
|
87 | validate_furl_or_file(furl_file) | |||
|
88 | return furl_file | |||
|
89 | ||||
|
90 | # Try by profile | |||
|
91 | if ipython_dir is None: | |||
|
92 | ipython_dir = get_ipython_dir() | |||
|
93 | if profile is not None: | |||
|
94 | cluster_dir_obj = ClusterDir.find_cluster_dir_by_profile( | |||
|
95 | ipython_dir, profile) | |||
|
96 | sdir = cluster_dir_obj.security_dir | |||
|
97 | furl_file = os.path.join(sdir, furl_file_name) | |||
|
98 | validate_furl_or_file(furl_file) | |||
|
99 | return furl_file | |||
|
100 | ||||
|
101 | raise FURLError('Could not find a valid FURL file.') | |||
|
102 | ||||
|
103 | def get_reference(self, furl_or_file): | |||
|
104 | """Get a remote reference using a furl or a file containing a furl. | |||
|
105 | ||||
50 | Remote references are cached locally so once a remote reference |
|
106 | Remote references are cached locally so once a remote reference | |
51 | has been retrieved for a given furl, the cached version is |
|
107 | has been retrieved for a given furl, the cached version is | |
52 | returned. |
|
108 | returned. | |
53 |
|
109 | |||
54 |
|
|
110 | Parameters | |
55 | furl_or_file : str |
|
111 | ---------- | |
56 | A furl or a filename containing a furl |
|
112 | furl_or_file : str | |
57 |
|
113 | A furl or a filename containing a furl. This should already be | ||
58 | :Returns: |
|
114 | validated, but might not yet exist. | |
59 | A deferred to a remote reference |
|
115 | ||
|
116 | Returns | |||
|
117 | ------- | |||
|
118 | A deferred to a remote reference | |||
60 | """ |
|
119 | """ | |
61 |
furl = |
|
120 | furl = furl_or_file | |
62 | if furl in self._remote_refs: |
|
121 | if furl in self._remote_refs: | |
63 | d = defer.succeed(self._remote_refs[furl]) |
|
122 | d = defer.succeed(self._remote_refs[furl]) | |
64 | else: |
|
123 | else: | |
65 | d = self.tub.getReference(furl) |
|
124 | d = self.tub.getReference(furl) | |
66 | d.addCallback(self.save_ref, furl) |
|
125 | d.addCallback(self._save_ref, furl) | |
67 | return d |
|
126 | return d | |
68 |
|
127 | |||
69 | def save_ref(self, ref, furl): |
|
128 | def _save_ref(self, ref, furl): | |
70 | """ |
|
129 | """Cache a remote reference by its furl.""" | |
71 | Cache a remote reference by its furl. |
|
|||
72 | """ |
|
|||
73 | self._remote_refs[furl] = ref |
|
130 | self._remote_refs[furl] = ref | |
74 | return ref |
|
131 | return ref | |
75 |
|
132 | |||
76 |
def get_task_client(self, |
|
133 | def get_task_client(self, profile='default', cluster_dir=None, | |
77 | """ |
|
134 | furl_or_file=None, ipython_dir=None, | |
78 | Get the task controller client. |
|
135 | delay=DELAY, max_tries=MAX_TRIES): | |
|
136 | """Get the task controller client. | |||
79 |
|
137 | |||
80 |
This method is a simple wrapper around `get_client` that |
|
138 | This method is a simple wrapper around `get_client` that passes in | |
81 | `furl_or_file` to be empty, in which case, the furls is taken |
|
139 | the default name of the task client FURL file. Usually only | |
82 | from the default furl file given in the configuration. |
|
140 | the ``profile`` option will be needed. If a FURL file can't be | |
|
141 | found by its profile, use ``cluster_dir`` or ``furl_or_file``. | |||
83 |
|
142 | |||
84 |
|
|
143 | Parameters | |
85 | furl_or_file : str |
|
144 | ---------- | |
86 | A furl or a filename containing a furl. If empty, the |
|
145 | profile : str | |
87 | default furl_file will be used |
|
146 | The name of a cluster directory profile (default="default"). The | |
88 |
|
147 | cluster directory "cluster_<profile>" will be searched for | ||
89 | :Returns: |
|
148 | in ``os.getcwd()``, the ipython_dir and then in the directories | |
90 | A deferred to the actual client class |
|
149 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |
91 | """ |
|
150 | cluster_dir : str | |
92 | task_co = client_co['client_interfaces']['task'] |
|
151 | The full path to a cluster directory. This is useful if profiles | |
93 | if furl_or_file: |
|
152 | are not being used. | |
94 |
|
|
153 | furl_or_file : str | |
95 | else: |
|
154 | A furl or a filename containing a FURLK. This is useful if you | |
96 | ff = task_co['furl_file'] |
|
155 | simply know the location of the FURL file. | |
97 | return self.get_client(ff) |
|
156 | ipython_dir : str | |
|
157 | The location of the ipython_dir if different from the default. | |||
|
158 | This is used if the cluster directory is being found by profile. | |||
|
159 | delay : float | |||
|
160 | The initial delay between re-connection attempts. Susequent delays | |||
|
161 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
162 | max_tries : int | |||
|
163 | The max number of re-connection attempts. | |||
98 |
|
|
164 | ||
99 | def get_multiengine_client(self, furl_or_file=''): |
|
165 | Returns | |
|
166 | ------- | |||
|
167 | A deferred to the actual client class. | |||
100 |
|
|
168 | """ | |
101 | Get the multiengine controller client. |
|
169 | return self.get_client( | |
|
170 | profile, cluster_dir, furl_or_file, | |||
|
171 | 'ipcontroller-tc.furl', ipython_dir, | |||
|
172 | delay, max_tries | |||
|
173 | ) | |||
|
174 | ||||
|
175 | def get_multiengine_client(self, profile='default', cluster_dir=None, | |||
|
176 | furl_or_file=None, ipython_dir=None, | |||
|
177 | delay=DELAY, max_tries=MAX_TRIES): | |||
|
178 | """Get the multiengine controller client. | |||
102 |
|
179 | |||
103 |
This method is a simple wrapper around `get_client` that |
|
180 | This method is a simple wrapper around `get_client` that passes in | |
104 | `furl_or_file` to be empty, in which case, the furls is taken |
|
181 | the default name of the task client FURL file. Usually only | |
105 | from the default furl file given in the configuration. |
|
182 | the ``profile`` option will be needed. If a FURL file can't be | |
|
183 | found by its profile, use ``cluster_dir`` or ``furl_or_file``. | |||
106 |
|
184 | |||
107 |
|
|
185 | Parameters | |
108 | furl_or_file : str |
|
186 | ---------- | |
109 | A furl or a filename containing a furl. If empty, the |
|
187 | profile : str | |
110 | default furl_file will be used |
|
188 | The name of a cluster directory profile (default="default"). The | |
111 |
|
189 | cluster directory "cluster_<profile>" will be searched for | ||
112 | :Returns: |
|
190 | in ``os.getcwd()``, the ipython_dir and then in the directories | |
113 | A deferred to the actual client class |
|
191 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |
|
192 | cluster_dir : str | |||
|
193 | The full path to a cluster directory. This is useful if profiles | |||
|
194 | are not being used. | |||
|
195 | furl_or_file : str | |||
|
196 | A furl or a filename containing a FURLK. This is useful if you | |||
|
197 | simply know the location of the FURL file. | |||
|
198 | ipython_dir : str | |||
|
199 | The location of the ipython_dir if different from the default. | |||
|
200 | This is used if the cluster directory is being found by profile. | |||
|
201 | delay : float | |||
|
202 | The initial delay between re-connection attempts. Susequent delays | |||
|
203 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
204 | max_tries : int | |||
|
205 | The max number of re-connection attempts. | |||
|
206 | ||||
|
207 | Returns | |||
|
208 | ------- | |||
|
209 | A deferred to the actual client class. | |||
114 | """ |
|
210 | """ | |
115 | task_co = client_co['client_interfaces']['multiengine'] |
|
211 | return self.get_client( | |
116 |
|
|
212 | profile, cluster_dir, furl_or_file, | |
117 | ff = furl_or_file |
|
213 | 'ipcontroller-mec.furl', ipython_dir, | |
118 | else: |
|
214 | delay, max_tries | |
119 | ff = task_co['furl_file'] |
|
215 | ) | |
120 | return self.get_client(ff) |
|
|||
121 |
|
216 | |||
122 |
def get_client(self, |
|
217 | def get_client(self, profile='default', cluster_dir=None, | |
123 | """ |
|
218 | furl_or_file=None, furl_file_name=None, ipython_dir=None, | |
124 | Get a remote reference and wrap it in a client by furl. |
|
219 | delay=DELAY, max_tries=MAX_TRIES): | |
125 |
|
220 | """Get a remote reference and wrap it in a client by furl. | ||
126 | This method first gets a remote reference and then calls its |
|
221 | ||
127 | `get_client_name` method to find the apprpriate client class |
|
222 | This method is a simple wrapper around `get_client` that passes in | |
128 | that should be used to wrap the remote reference. |
|
223 | the default name of the task client FURL file. Usually only | |
129 |
|
224 | the ``profile`` option will be needed. If a FURL file can't be | ||
130 | :Parameters: |
|
225 | found by its profile, use ``cluster_dir`` or ``furl_or_file``. | |
131 | furl_or_file : str |
|
|||
132 | A furl or a filename containing a furl |
|
|||
133 |
|
226 | |||
134 |
|
|
227 | Parameters | |
135 | A deferred to the actual client class |
|
228 | ---------- | |
|
229 | profile : str | |||
|
230 | The name of a cluster directory profile (default="default"). The | |||
|
231 | cluster directory "cluster_<profile>" will be searched for | |||
|
232 | in ``os.getcwd()``, the ipython_dir and then in the directories | |||
|
233 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
234 | cluster_dir : str | |||
|
235 | The full path to a cluster directory. This is useful if profiles | |||
|
236 | are not being used. | |||
|
237 | furl_or_file : str | |||
|
238 | A furl or a filename containing a FURL. This is useful if you | |||
|
239 | simply know the location of the FURL file. | |||
|
240 | furl_file_name : str | |||
|
241 | The filename (not the full path) of the FURL. This must be | |||
|
242 | provided if ``furl_or_file`` is not. | |||
|
243 | ipython_dir : str | |||
|
244 | The location of the ipython_dir if different from the default. | |||
|
245 | This is used if the cluster directory is being found by profile. | |||
|
246 | delay : float | |||
|
247 | The initial delay between re-connection attempts. Susequent delays | |||
|
248 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
249 | max_tries : int | |||
|
250 | The max number of re-connection attempts. | |||
|
251 | ||||
|
252 | Returns | |||
|
253 | ------- | |||
|
254 | A deferred to the actual client class. Or a failure to a | |||
|
255 | :exc:`FURLError`. | |||
136 | """ |
|
256 | """ | |
137 | furl = find_furl(furl_or_file) |
|
257 | try: | |
138 |
|
|
258 | furl_file = self._find_furl( | |
139 | def wrap_remote_reference(rr): |
|
259 | profile, cluster_dir, furl_or_file, | |
|
260 | furl_file_name, ipython_dir | |||
|
261 | ) | |||
|
262 | except FURLError: | |||
|
263 | return defer.fail(failure.Failure()) | |||
|
264 | ||||
|
265 | def _wrap_remote_reference(rr): | |||
140 | d = rr.callRemote('get_client_name') |
|
266 | d = rr.callRemote('get_client_name') | |
141 | d.addCallback(lambda name: import_item(name)) |
|
267 | d.addCallback(lambda name: import_item(name)) | |
142 | def adapt(client_interface): |
|
268 | def adapt(client_interface): | |
@@ -146,5 +272,502 b' class ClientConnector(object):' | |||||
146 | d.addCallback(adapt) |
|
272 | d.addCallback(adapt) | |
147 |
|
273 | |||
148 | return d |
|
274 | return d | |
149 | d.addCallback(wrap_remote_reference) |
|
275 | ||
|
276 | d = self._try_to_connect(furl_file, delay, max_tries, attempt=0) | |||
|
277 | d.addCallback(_wrap_remote_reference) | |||
|
278 | d.addErrback(self._handle_error, furl_file) | |||
|
279 | return d | |||
|
280 | ||||
|
281 | def _handle_error(self, f, furl_file): | |||
|
282 | raise ClientConnectorError('Could not connect to the controller ' | |||
|
283 | 'using the FURL file. This usually means that i) the controller ' | |||
|
284 | 'was not started or ii) a firewall was blocking the client from ' | |||
|
285 | 'connecting to the controller: %s' % furl_file) | |||
|
286 | ||||
|
287 | @inlineCallbacks | |||
|
288 | def _try_to_connect(self, furl_or_file, delay, max_tries, attempt): | |||
|
289 | """Try to connect to the controller with retry logic.""" | |||
|
290 | if attempt < max_tries: | |||
|
291 | log.msg("Connecting [%r]" % attempt) | |||
|
292 | try: | |||
|
293 | self.furl = find_furl(furl_or_file) | |||
|
294 | # Uncomment this to see the FURL being tried. | |||
|
295 | # log.msg("FURL: %s" % self.furl) | |||
|
296 | rr = yield self.get_reference(self.furl) | |||
|
297 | log.msg("Connected: %s" % furl_or_file) | |||
|
298 | except: | |||
|
299 | if attempt==max_tries-1: | |||
|
300 | # This will propagate the exception all the way to the top | |||
|
301 | # where it can be handled. | |||
|
302 | raise | |||
|
303 | else: | |||
|
304 | yield sleep_deferred(delay) | |||
|
305 | rr = yield self._try_to_connect( | |||
|
306 | furl_or_file, 1.5*delay, max_tries, attempt+1 | |||
|
307 | ) | |||
|
308 | returnValue(rr) | |||
|
309 | else: | |||
|
310 | returnValue(rr) | |||
|
311 | else: | |||
|
312 | raise ClientConnectorError( | |||
|
313 | 'Could not connect to controller, max_tries (%r) exceeded. ' | |||
|
314 | 'This usually means that i) the controller was not started, ' | |||
|
315 | 'or ii) a firewall was blocking the client from connecting ' | |||
|
316 | 'to the controller.' % max_tries | |||
|
317 | ) | |||
|
318 | ||||
|
319 | ||||
|
320 | class ClientConnector(object): | |||
|
321 | """A blocking version of a client connector. | |||
|
322 | ||||
|
323 | This class creates a single :class:`Tub` instance and allows remote | |||
|
324 | references and client to be retrieved by their FURLs. Remote references | |||
|
325 | are cached locally and FURL files can be found using profiles and cluster | |||
|
326 | directories. | |||
|
327 | """ | |||
|
328 | ||||
|
329 | def __init__(self): | |||
|
330 | self.async_cc = AsyncClientConnector() | |||
|
331 | ||||
|
332 | def get_task_client(self, profile='default', cluster_dir=None, | |||
|
333 | furl_or_file=None, ipython_dir=None, | |||
|
334 | delay=DELAY, max_tries=MAX_TRIES): | |||
|
335 | """Get the task client. | |||
|
336 | ||||
|
337 | Usually only the ``profile`` option will be needed. If a FURL file | |||
|
338 | can't be found by its profile, use ``cluster_dir`` or | |||
|
339 | ``furl_or_file``. | |||
|
340 | ||||
|
341 | Parameters | |||
|
342 | ---------- | |||
|
343 | profile : str | |||
|
344 | The name of a cluster directory profile (default="default"). The | |||
|
345 | cluster directory "cluster_<profile>" will be searched for | |||
|
346 | in ``os.getcwd()``, the ipython_dir and then in the directories | |||
|
347 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
348 | cluster_dir : str | |||
|
349 | The full path to a cluster directory. This is useful if profiles | |||
|
350 | are not being used. | |||
|
351 | furl_or_file : str | |||
|
352 | A furl or a filename containing a FURLK. This is useful if you | |||
|
353 | simply know the location of the FURL file. | |||
|
354 | ipython_dir : str | |||
|
355 | The location of the ipython_dir if different from the default. | |||
|
356 | This is used if the cluster directory is being found by profile. | |||
|
357 | delay : float | |||
|
358 | The initial delay between re-connection attempts. Susequent delays | |||
|
359 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
360 | max_tries : int | |||
|
361 | The max number of re-connection attempts. | |||
|
362 | ||||
|
363 | Returns | |||
|
364 | ------- | |||
|
365 | The task client instance. | |||
|
366 | """ | |||
|
367 | client = blockingCallFromThread( | |||
|
368 | self.async_cc.get_task_client, profile, cluster_dir, | |||
|
369 | furl_or_file, ipython_dir, delay, max_tries | |||
|
370 | ) | |||
|
371 | return client.adapt_to_blocking_client() | |||
|
372 | ||||
|
373 | def get_multiengine_client(self, profile='default', cluster_dir=None, | |||
|
374 | furl_or_file=None, ipython_dir=None, | |||
|
375 | delay=DELAY, max_tries=MAX_TRIES): | |||
|
376 | """Get the multiengine client. | |||
|
377 | ||||
|
378 | Usually only the ``profile`` option will be needed. If a FURL file | |||
|
379 | can't be found by its profile, use ``cluster_dir`` or | |||
|
380 | ``furl_or_file``. | |||
|
381 | ||||
|
382 | Parameters | |||
|
383 | ---------- | |||
|
384 | profile : str | |||
|
385 | The name of a cluster directory profile (default="default"). The | |||
|
386 | cluster directory "cluster_<profile>" will be searched for | |||
|
387 | in ``os.getcwd()``, the ipython_dir and then in the directories | |||
|
388 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
389 | cluster_dir : str | |||
|
390 | The full path to a cluster directory. This is useful if profiles | |||
|
391 | are not being used. | |||
|
392 | furl_or_file : str | |||
|
393 | A furl or a filename containing a FURLK. This is useful if you | |||
|
394 | simply know the location of the FURL file. | |||
|
395 | ipython_dir : str | |||
|
396 | The location of the ipython_dir if different from the default. | |||
|
397 | This is used if the cluster directory is being found by profile. | |||
|
398 | delay : float | |||
|
399 | The initial delay between re-connection attempts. Susequent delays | |||
|
400 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
401 | max_tries : int | |||
|
402 | The max number of re-connection attempts. | |||
|
403 | ||||
|
404 | Returns | |||
|
405 | ------- | |||
|
406 | The multiengine client instance. | |||
|
407 | """ | |||
|
408 | client = blockingCallFromThread( | |||
|
409 | self.async_cc.get_multiengine_client, profile, cluster_dir, | |||
|
410 | furl_or_file, ipython_dir, delay, max_tries | |||
|
411 | ) | |||
|
412 | return client.adapt_to_blocking_client() | |||
|
413 | ||||
|
414 | def get_client(self, profile='default', cluster_dir=None, | |||
|
415 | furl_or_file=None, ipython_dir=None, | |||
|
416 | delay=DELAY, max_tries=MAX_TRIES): | |||
|
417 | client = blockingCallFromThread( | |||
|
418 | self.async_cc.get_client, profile, cluster_dir, | |||
|
419 | furl_or_file, ipython_dir, | |||
|
420 | delay, max_tries | |||
|
421 | ) | |||
|
422 | return client.adapt_to_blocking_client() | |||
|
423 | ||||
|
424 | ||||
|
425 | class ClusterStateError(Exception): | |||
|
426 | pass | |||
|
427 | ||||
|
428 | ||||
|
429 | class AsyncCluster(object): | |||
|
430 | """An class that wraps the :command:`ipcluster` script.""" | |||
|
431 | ||||
|
432 | def __init__(self, profile='default', cluster_dir=None, ipython_dir=None, | |||
|
433 | auto_create=False, auto_stop=True): | |||
|
434 | """Create a class to manage an IPython cluster. | |||
|
435 | ||||
|
436 | This class calls the :command:`ipcluster` command with the right | |||
|
437 | options to start an IPython cluster. Typically a cluster directory | |||
|
438 | must be created (:command:`ipcluster create`) and configured before | |||
|
439 | using this class. Configuration is done by editing the | |||
|
440 | configuration files in the top level of the cluster directory. | |||
|
441 | ||||
|
442 | Parameters | |||
|
443 | ---------- | |||
|
444 | profile : str | |||
|
445 | The name of a cluster directory profile (default="default"). The | |||
|
446 | cluster directory "cluster_<profile>" will be searched for | |||
|
447 | in ``os.getcwd()``, the ipython_dir and then in the directories | |||
|
448 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
449 | cluster_dir : str | |||
|
450 | The full path to a cluster directory. This is useful if profiles | |||
|
451 | are not being used. | |||
|
452 | ipython_dir : str | |||
|
453 | The location of the ipython_dir if different from the default. | |||
|
454 | This is used if the cluster directory is being found by profile. | |||
|
455 | auto_create : bool | |||
|
456 | Automatically create the cluster directory it is dones't exist. | |||
|
457 | This will usually only make sense if using a local cluster | |||
|
458 | (default=False). | |||
|
459 | auto_stop : bool | |||
|
460 | Automatically stop the cluster when this instance is garbage | |||
|
461 | collected (default=True). This is useful if you want the cluster | |||
|
462 | to live beyond your current process. There is also an instance | |||
|
463 | attribute ``auto_stop`` to change this behavior. | |||
|
464 | """ | |||
|
465 | self._setup_cluster_dir(profile, cluster_dir, ipython_dir, auto_create) | |||
|
466 | self.state = 'before' | |||
|
467 | self.launcher = None | |||
|
468 | self.client_connector = None | |||
|
469 | self.auto_stop = auto_stop | |||
|
470 | ||||
|
471 | def __del__(self): | |||
|
472 | if self.auto_stop and self.state=='running': | |||
|
473 | print "Auto stopping the cluster..." | |||
|
474 | self.stop() | |||
|
475 | ||||
|
476 | @property | |||
|
477 | def location(self): | |||
|
478 | if hasattr(self, 'cluster_dir_obj'): | |||
|
479 | return self.cluster_dir_obj.location | |||
|
480 | else: | |||
|
481 | return '' | |||
|
482 | ||||
|
483 | @property | |||
|
484 | def running(self): | |||
|
485 | if self.state=='running': | |||
|
486 | return True | |||
|
487 | else: | |||
|
488 | return False | |||
|
489 | ||||
|
490 | def _setup_cluster_dir(self, profile, cluster_dir, ipython_dir, auto_create): | |||
|
491 | if ipython_dir is None: | |||
|
492 | ipython_dir = get_ipython_dir() | |||
|
493 | if cluster_dir is not None: | |||
|
494 | try: | |||
|
495 | self.cluster_dir_obj = ClusterDir.find_cluster_dir(cluster_dir) | |||
|
496 | except ClusterDirError: | |||
|
497 | pass | |||
|
498 | if profile is not None: | |||
|
499 | try: | |||
|
500 | self.cluster_dir_obj = ClusterDir.find_cluster_dir_by_profile( | |||
|
501 | ipython_dir, profile) | |||
|
502 | except ClusterDirError: | |||
|
503 | pass | |||
|
504 | if auto_create or profile=='default': | |||
|
505 | # This should call 'ipcluster create --profile default | |||
|
506 | self.cluster_dir_obj = ClusterDir.create_cluster_dir_by_profile( | |||
|
507 | ipython_dir, profile) | |||
|
508 | else: | |||
|
509 | raise ClusterDirError('Cluster dir not found.') | |||
|
510 | ||||
|
511 | @make_deferred | |||
|
512 | def start(self, n=2): | |||
|
513 | """Start the IPython cluster with n engines. | |||
|
514 | ||||
|
515 | Parameters | |||
|
516 | ---------- | |||
|
517 | n : int | |||
|
518 | The number of engine to start. | |||
|
519 | """ | |||
|
520 | # We might want to add logic to test if the cluster has started | |||
|
521 | # by another process.... | |||
|
522 | if not self.state=='running': | |||
|
523 | self.launcher = IPClusterLauncher(os.getcwd()) | |||
|
524 | self.launcher.ipcluster_n = n | |||
|
525 | self.launcher.ipcluster_subcommand = 'start' | |||
|
526 | d = self.launcher.start() | |||
|
527 | d.addCallback(self._handle_start) | |||
|
528 | return d | |||
|
529 | else: | |||
|
530 | raise ClusterStateError('Cluster is already running') | |||
|
531 | ||||
|
532 | @make_deferred | |||
|
533 | def stop(self): | |||
|
534 | """Stop the IPython cluster if it is running.""" | |||
|
535 | if self.state=='running': | |||
|
536 | d1 = self.launcher.observe_stop() | |||
|
537 | d1.addCallback(self._handle_stop) | |||
|
538 | d2 = self.launcher.stop() | |||
|
539 | return gatherBoth([d1, d2], consumeErrors=True) | |||
|
540 | else: | |||
|
541 | raise ClusterStateError("Cluster not running") | |||
|
542 | ||||
|
543 | def get_multiengine_client(self, delay=DELAY, max_tries=MAX_TRIES): | |||
|
544 | """Get the multiengine client for the running cluster. | |||
|
545 | ||||
|
546 | If this fails, it means that the cluster has not finished starting. | |||
|
547 | Usually waiting a few seconds are re-trying will solve this. | |||
|
548 | """ | |||
|
549 | if self.client_connector is None: | |||
|
550 | self.client_connector = AsyncClientConnector() | |||
|
551 | return self.client_connector.get_multiengine_client( | |||
|
552 | cluster_dir=self.cluster_dir_obj.location, | |||
|
553 | delay=delay, max_tries=max_tries | |||
|
554 | ) | |||
|
555 | ||||
|
556 | def get_task_client(self, delay=DELAY, max_tries=MAX_TRIES): | |||
|
557 | """Get the task client for the running cluster. | |||
|
558 | ||||
|
559 | If this fails, it means that the cluster has not finished starting. | |||
|
560 | Usually waiting a few seconds and re-trying will solve this. | |||
|
561 | """ | |||
|
562 | if self.client_connector is None: | |||
|
563 | self.client_connector = AsyncClientConnector() | |||
|
564 | return self.client_connector.get_task_client( | |||
|
565 | cluster_dir=self.cluster_dir_obj.location, | |||
|
566 | delay=delay, max_tries=max_tries | |||
|
567 | ) | |||
|
568 | ||||
|
569 | def get_ipengine_logs(self): | |||
|
570 | return self.get_logs_by_name('ipengine') | |||
|
571 | ||||
|
572 | def get_ipcontroller_logs(self): | |||
|
573 | return self.get_logs_by_name('ipcontroller') | |||
|
574 | ||||
|
575 | def get_ipcluster_logs(self): | |||
|
576 | return self.get_logs_by_name('ipcluster') | |||
|
577 | ||||
|
578 | def get_logs_by_name(self, name='ipcluster'): | |||
|
579 | log_dir = self.cluster_dir_obj.log_dir | |||
|
580 | logs = {} | |||
|
581 | for log in os.listdir(log_dir): | |||
|
582 | if log.startswith(name + '-') and log.endswith('.log'): | |||
|
583 | with open(os.path.join(log_dir, log), 'r') as f: | |||
|
584 | logs[log] = f.read() | |||
|
585 | return logs | |||
|
586 | ||||
|
587 | def get_logs(self): | |||
|
588 | d = self.get_ipcluster_logs() | |||
|
589 | d.update(self.get_ipengine_logs()) | |||
|
590 | d.update(self.get_ipcontroller_logs()) | |||
150 | return d |
|
591 | return d | |
|
592 | ||||
|
593 | def _handle_start(self, r): | |||
|
594 | self.state = 'running' | |||
|
595 | ||||
|
596 | def _handle_stop(self, r): | |||
|
597 | self.state = 'after' | |||
|
598 | ||||
|
599 | ||||
|
600 | class Cluster(object): | |||
|
601 | ||||
|
602 | ||||
|
603 | def __init__(self, profile='default', cluster_dir=None, ipython_dir=None, | |||
|
604 | auto_create=False, auto_stop=True): | |||
|
605 | """Create a class to manage an IPython cluster. | |||
|
606 | ||||
|
607 | This class calls the :command:`ipcluster` command with the right | |||
|
608 | options to start an IPython cluster. Typically a cluster directory | |||
|
609 | must be created (:command:`ipcluster create`) and configured before | |||
|
610 | using this class. Configuration is done by editing the | |||
|
611 | configuration files in the top level of the cluster directory. | |||
|
612 | ||||
|
613 | Parameters | |||
|
614 | ---------- | |||
|
615 | profile : str | |||
|
616 | The name of a cluster directory profile (default="default"). The | |||
|
617 | cluster directory "cluster_<profile>" will be searched for | |||
|
618 | in ``os.getcwd()``, the ipython_dir and then in the directories | |||
|
619 | listed in the :env:`IPCLUSTER_DIR_PATH` environment variable. | |||
|
620 | cluster_dir : str | |||
|
621 | The full path to a cluster directory. This is useful if profiles | |||
|
622 | are not being used. | |||
|
623 | ipython_dir : str | |||
|
624 | The location of the ipython_dir if different from the default. | |||
|
625 | This is used if the cluster directory is being found by profile. | |||
|
626 | auto_create : bool | |||
|
627 | Automatically create the cluster directory if it doesn't exist. | |||
|
628 | This will usually only make sense if using a local cluster | |||
|
629 | (default=False). | |||
|
630 | auto_stop : bool | |||
|
631 | Automatically stop the cluster when this instance is garbage | |||
|
632 | collected (default=True). This is useful if you want the cluster | |||
|
633 | to live beyond your current process. There is also an instance | |||
|
634 | attribute ``auto_stop`` to change this behavior. | |||
|
635 | """ | |||
|
636 | self.async_cluster = AsyncCluster( | |||
|
637 | profile, cluster_dir, ipython_dir, auto_create, auto_stop | |||
|
638 | ) | |||
|
639 | self.cluster_dir_obj = self.async_cluster.cluster_dir_obj | |||
|
640 | self.client_connector = None | |||
|
641 | ||||
|
642 | def _set_auto_stop(self, value): | |||
|
643 | self.async_cluster.auto_stop = value | |||
|
644 | ||||
|
645 | def _get_auto_stop(self): | |||
|
646 | return self.async_cluster.auto_stop | |||
|
647 | ||||
|
648 | auto_stop = property(_get_auto_stop, _set_auto_stop) | |||
|
649 | ||||
|
650 | @property | |||
|
651 | def location(self): | |||
|
652 | return self.async_cluster.location | |||
|
653 | ||||
|
654 | @property | |||
|
655 | def running(self): | |||
|
656 | return self.async_cluster.running | |||
|
657 | ||||
|
658 | def start(self, n=2): | |||
|
659 | """Start the IPython cluster with n engines. | |||
|
660 | ||||
|
661 | Parameters | |||
|
662 | ---------- | |||
|
663 | n : int | |||
|
664 | The number of engines to start. | |||
|
665 | """ | |||
|
666 | return blockingCallFromThread(self.async_cluster.start, n) | |||
|
667 | ||||
|
668 | def stop(self): | |||
|
669 | """Stop the IPython cluster if it is running.""" | |||
|
670 | return blockingCallFromThread(self.async_cluster.stop) | |||
|
671 | ||||
|
672 | def get_multiengine_client(self, delay=DELAY, max_tries=MAX_TRIES): | |||
|
673 | """Get the multiengine client for the running cluster. | |||
|
674 | ||||
|
675 | This will try to connect to the controller multiple times. If this | |||
|
676 | fails altogether, try looking at the following: | |||
|
677 | * Make sure the controller is starting properly by looking at its | |||
|
678 | log files. | |||
|
679 | * Make sure the controller is writing its FURL file in the location | |||
|
680 | expected by the client. | |||
|
681 | * Make sure a firewall on the controller's host is not blocking the | |||
|
682 | client from connecting. | |||
|
683 | ||||
|
684 | Parameters | |||
|
685 | ---------- | |||
|
686 | delay : float | |||
|
687 | The initial delay between re-connection attempts. Subsequent delays | |||
|
688 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
689 | max_tries : int | |||
|
690 | The max number of re-connection attempts. | |||
|
691 | """ | |||
|
692 | if self.client_connector is None: | |||
|
693 | self.client_connector = ClientConnector() | |||
|
694 | return self.client_connector.get_multiengine_client( | |||
|
695 | cluster_dir=self.cluster_dir_obj.location, | |||
|
696 | delay=delay, max_tries=max_tries | |||
|
697 | ) | |||
|
698 | ||||
|
699 | def get_task_client(self, delay=DELAY, max_tries=MAX_TRIES): | |||
|
700 | """Get the task client for the running cluster. | |||
|
701 | ||||
|
702 | This will try to connect to the controller multiple times. If this | |||
|
703 | fails altogether, try looking at the following: | |||
|
704 | * Make sure the controller is starting properly by looking at its | |||
|
705 | log files. | |||
|
706 | * Make sure the controller is writing its FURL file in the location | |||
|
707 | expected by the client. | |||
|
708 | * Make sure a firewall on the controller's host is not blocking the | |||
|
709 | client from connecting. | |||
|
710 | ||||
|
711 | Parameters | |||
|
712 | ---------- | |||
|
713 | delay : float | |||
|
714 | The initial delay between re-connection attempts. Subsequent delays | |||
|
715 | get longer according to ``delay[i] = 1.5*delay[i-1]``. | |||
|
716 | max_tries : int | |||
|
717 | The max number of re-connection attempts. | |||
|
718 | """ | |||
|
719 | if self.client_connector is None: | |||
|
720 | self.client_connector = ClientConnector() | |||
|
721 | return self.client_connector.get_task_client( | |||
|
722 | cluster_dir=self.cluster_dir_obj.location, | |||
|
723 | delay=delay, max_tries=max_tries | |||
|
724 | ) | |||
|
725 | ||||
|
726 | def __repr__(self): | |||
|
727 | s = "<Cluster(running=%r, location=%s)" % (self.running, self.location) | |||
|
728 | return s | |||
|
729 | ||||
|
730 | def get_logs_by_name(self, name='ipcluter'): | |||
|
731 | """Get a dict of logs by process name (ipcluster, ipengine, etc.)""" | |||
|
732 | return self.async_cluster.get_logs_by_name(name) | |||
|
733 | ||||
|
734 | def get_ipengine_logs(self): | |||
|
735 | """Get a dict of logs for all engines in this cluster.""" | |||
|
736 | return self.async_cluster.get_ipengine_logs() | |||
|
737 | ||||
|
738 | def get_ipcontroller_logs(self): | |||
|
739 | """Get a dict of logs for the controller in this cluster.""" | |||
|
740 | return self.async_cluster.get_ipcontroller_logs() | |||
|
741 | ||||
|
742 | def get_ipcluster_logs(self): | |||
|
743 | """Get a dict of the ipcluster logs for this cluster.""" | |||
|
744 | return self.async_cluster.get_ipcluster_logs() | |||
|
745 | ||||
|
746 | def get_logs(self): | |||
|
747 | """Get a dict of all logs for this cluster.""" | |||
|
748 | return self.async_cluster.get_logs() | |||
|
749 | ||||
|
750 | def _print_logs(self, logs): | |||
|
751 | for k, v in logs.iteritems(): | |||
|
752 | print "===================================" | |||
|
753 | print "Logfile: %s" % k | |||
|
754 | print "===================================" | |||
|
755 | print v | |||
|
756 | ||||
|
757 | ||||
|
758 | def print_ipengine_logs(self): | |||
|
759 | """Print the ipengine logs for this cluster to stdout.""" | |||
|
760 | self._print_logs(self.get_ipengine_logs()) | |||
|
761 | ||||
|
762 | def print_ipcontroller_logs(self): | |||
|
763 | """Print the ipcontroller logs for this cluster to stdout.""" | |||
|
764 | self._print_logs(self.get_ipcontroller_logs()) | |||
|
765 | ||||
|
766 | def print_ipcluster_logs(self): | |||
|
767 | """Print the ipcluster logs for this cluster to stdout.""" | |||
|
768 | self._print_logs(self.get_ipcluster_logs()) | |||
|
769 | ||||
|
770 | def print_logs(self): | |||
|
771 | """Print all the logs for this cluster to stdout.""" | |||
|
772 | self._print_logs(self.get_logs()) | |||
|
773 |
@@ -1,32 +1,38 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
2 |
|
3 | |||
3 | """A class that manages the engines connection to the controller.""" |
|
4 | """A class that manages the engines connection to the controller.""" | |
4 |
|
5 | |||
5 | __docformat__ = "restructuredtext en" |
|
6 | #----------------------------------------------------------------------------- | |
6 |
|
7 | # Copyright (C) 2008-2009 The IPython Development Team | ||
7 | #------------------------------------------------------------------------------- |
|
|||
8 | # Copyright (C) 2008 The IPython Development Team |
|
|||
9 | # |
|
8 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
12 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
13 |
|
12 | |||
14 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
14 | # Imports | |
16 |
#----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
17 |
|
16 | |||
18 | import os |
|
17 | import os | |
19 | import cPickle as pickle |
|
18 | import cPickle as pickle | |
20 |
|
19 | |||
21 | from twisted.python import log, failure |
|
20 | from twisted.python import log, failure | |
22 | from twisted.internet import defer |
|
21 | from twisted.internet import defer | |
|
22 | from twisted.internet.defer import inlineCallbacks, returnValue | |||
23 |
|
23 | |||
24 | from IPython.kernel.fcutil import find_furl |
|
24 | from IPython.kernel.fcutil import find_furl, validate_furl_or_file | |
25 | from IPython.kernel.enginefc import IFCEngine |
|
25 | from IPython.kernel.enginefc import IFCEngine | |
|
26 | from IPython.kernel.twistedutil import sleep_deferred, make_deferred | |||
26 |
|
27 | |||
27 |
#----------------------------------------------------------------------------- |
|
28 | #----------------------------------------------------------------------------- | |
28 | # The ClientConnector class |
|
29 | # The ClientConnector class | |
29 |
#----------------------------------------------------------------------------- |
|
30 | #----------------------------------------------------------------------------- | |
|
31 | ||||
|
32 | ||||
|
33 | class EngineConnectorError(Exception): | |||
|
34 | pass | |||
|
35 | ||||
30 |
|
36 | |||
31 | class EngineConnector(object): |
|
37 | class EngineConnector(object): | |
32 | """Manage an engines connection to a controller. |
|
38 | """Manage an engines connection to a controller. | |
@@ -38,8 +44,10 b' class EngineConnector(object):' | |||||
38 |
|
44 | |||
39 | def __init__(self, tub): |
|
45 | def __init__(self, tub): | |
40 | self.tub = tub |
|
46 | self.tub = tub | |
41 |
|
47 | |||
42 | def connect_to_controller(self, engine_service, furl_or_file): |
|
48 | @make_deferred | |
|
49 | def connect_to_controller(self, engine_service, furl_or_file, | |||
|
50 | delay=0.1, max_tries=10): | |||
43 | """ |
|
51 | """ | |
44 | Make a connection to a controller specified by a furl. |
|
52 | Make a connection to a controller specified by a furl. | |
45 |
|
53 | |||
@@ -48,34 +56,73 b' class EngineConnector(object):' | |||||
48 | foolscap URL contains all the information needed to connect to the |
|
56 | foolscap URL contains all the information needed to connect to the | |
49 | controller, including the ip and port as well as any encryption and |
|
57 | controller, including the ip and port as well as any encryption and | |
50 | authentication information needed for the connection. |
|
58 | authentication information needed for the connection. | |
51 |
|
59 | |||
52 | After getting a reference to the controller, this method calls the |
|
60 | After getting a reference to the controller, this method calls the | |
53 | `register_engine` method of the controller to actually register the |
|
61 | `register_engine` method of the controller to actually register the | |
54 | engine. |
|
62 | engine. | |
55 |
|
63 | |||
56 | :Parameters: |
|
64 | This method will try to connect to the controller multiple times with | |
57 | engine_service : IEngineBase |
|
65 | a delay in between. Each time the FURL file is read anew. | |
58 | An instance of an `IEngineBase` implementer |
|
66 | ||
59 | furl_or_file : str |
|
67 | Parameters | |
60 | A furl or a filename containing a furl |
|
68 | ---------- | 
|
69 | engine_service : IEngineBase | |||
|
70 | An instance of an `IEngineBase` implementer | |||
|
71 | furl_or_file : str | |||
|
72 | A furl or a filename containing a furl | |||
|
73 | delay : float | |||
|
74 | The initial time to wait between connection attempts. Subsequent | |||
|
75 | attempts have increasing delays. | |||
|
76 | max_tries : int | |||
|
77 | The maximum number of connection attempts. | |||
|
78 | ||||
|
79 | Returns | |||
|
80 | ------- | |||
|
81 | A deferred to the registered client or a failure to an error | |||
|
82 | like :exc:`FURLError`. | |||
61 | """ |
|
83 | """ | |
62 | if not self.tub.running: |
|
84 | if not self.tub.running: | |
63 | self.tub.startService() |
|
85 | self.tub.startService() | |
64 | self.engine_service = engine_service |
|
86 | self.engine_service = engine_service | |
65 | self.engine_reference = IFCEngine(self.engine_service) |
|
87 | self.engine_reference = IFCEngine(self.engine_service) | |
66 | try: |
|
88 | ||
67 |
|
|
89 | validate_furl_or_file(furl_or_file) | |
68 | except ValueError: |
|
90 | d = self._try_to_connect(furl_or_file, delay, max_tries, attempt=0) | |
69 | return defer.fail(failure.Failure()) |
|
91 | d.addCallback(self._register) | |
|
92 | return d | |||
|
93 | ||||
|
94 | @inlineCallbacks | |||
|
95 | def _try_to_connect(self, furl_or_file, delay, max_tries, attempt): | |||
|
96 | """Try to connect to the controller with retry logic.""" | |||
|
97 | if attempt < max_tries: | |||
|
98 | log.msg("Attempting to connect to controller [%r]: %s" % \ | |||
|
99 | (attempt, furl_or_file)) | |||
|
100 | try: | |||
|
101 | self.furl = find_furl(furl_or_file) | |||
|
102 | # Uncomment this to see the FURL being tried. | |||
|
103 | # log.msg("FURL: %s" % self.furl) | |||
|
104 | rr = yield self.tub.getReference(self.furl) | |||
|
105 | except: | |||
|
106 | if attempt==max_tries-1: | |||
|
107 | # This will propagate the exception all the way to the top | |||
|
108 | # where it can be handled. | |||
|
109 | raise | |||
|
110 | else: | |||
|
111 | yield sleep_deferred(delay) | |||
|
112 | rr = yield self._try_to_connect( | |||
|
113 | furl_or_file, 1.5*delay, max_tries, attempt+1 | |||
|
114 | ) | |||
|
115 | # rr becomes an int when there is a connection!!! | |||
|
116 | returnValue(rr) | |||
|
117 | else: | |||
|
118 | returnValue(rr) | |||
70 | else: |
|
119 | else: | |
71 | d = self.tub.getReference(self.furl) |
|
120 | raise EngineConnectorError( | |
72 | d.addCallbacks(self._register, self._log_failure) |
|
121 | 'Could not connect to controller, max_tries (%r) exceeded. ' | |
73 | return d |
|
122 | 'This usually means that i) the controller was not started, ' | |
74 |
|
123 | 'or ii) a firewall was blocking the engine from connecting ' | ||
75 | def _log_failure(self, reason): |
|
124 | 'to the controller.' % max_tries | |
76 | log.err('EngineConnector: engine registration failed:') |
|
125 | ) | |
77 | log.err(reason) |
|
|||
78 | return reason |
|
|||
79 |
|
126 | |||
80 | def _register(self, rr): |
|
127 | def _register(self, rr): | |
81 | self.remote_ref = rr |
|
128 | self.remote_ref = rr | |
@@ -83,7 +130,7 b' class EngineConnector(object):' | |||||
83 | desired_id = self.engine_service.id |
|
130 | desired_id = self.engine_service.id | |
84 | d = self.remote_ref.callRemote('register_engine', self.engine_reference, |
|
131 | d = self.remote_ref.callRemote('register_engine', self.engine_reference, | |
85 | desired_id, os.getpid(), pickle.dumps(self.engine_service.properties,2)) |
|
132 | desired_id, os.getpid(), pickle.dumps(self.engine_service.properties,2)) | |
86 |
return d.addCallback |
|
133 | return d.addCallback(self._reference_sent) | |
87 |
|
134 | |||
88 | def _reference_sent(self, registration_dict): |
|
135 | def _reference_sent(self, registration_dict): | |
89 | self.engine_service.id = registration_dict['id'] |
|
136 | self.engine_service.id = registration_dict['id'] |
@@ -127,9 +127,11 b' class TaskRejectError(KernelError):' | |||||
127 | class CompositeError(KernelError): |
|
127 | class CompositeError(KernelError): | |
128 | def __init__(self, message, elist): |
|
128 | def __init__(self, message, elist): | |
129 | Exception.__init__(self, *(message, elist)) |
|
129 | Exception.__init__(self, *(message, elist)) | |
130 | self.message = message |
|
130 | # Don't use pack_exception because it will conflict with the .message | |
|
131 | # attribute that is being deprecated in 2.6 and beyond. | |||
|
132 | self.msg = message | |||
131 | self.elist = elist |
|
133 | self.elist = elist | |
132 |
|
134 | |||
133 | def _get_engine_str(self, ev): |
|
135 | def _get_engine_str(self, ev): | |
134 | try: |
|
136 | try: | |
135 | ei = ev._ipython_engine_info |
|
137 | ei = ev._ipython_engine_info | |
@@ -137,7 +139,7 b' class CompositeError(KernelError):' | |||||
137 | return '[Engine Exception]' |
|
139 | return '[Engine Exception]' | |
138 | else: |
|
140 | else: | |
139 | return '[%i:%s]: ' % (ei['engineid'], ei['method']) |
|
141 | return '[%i:%s]: ' % (ei['engineid'], ei['method']) | |
140 |
|
142 | |||
141 | def _get_traceback(self, ev): |
|
143 | def _get_traceback(self, ev): | |
142 | try: |
|
144 | try: | |
143 | tb = ev._ipython_traceback_text |
|
145 | tb = ev._ipython_traceback_text | |
@@ -145,14 +147,14 b' class CompositeError(KernelError):' | |||||
145 | return 'No traceback available' |
|
147 | return 'No traceback available' | |
146 | else: |
|
148 | else: | |
147 | return tb |
|
149 | return tb | |
148 |
|
150 | |||
149 | def __str__(self): |
|
151 | def __str__(self): | |
150 |
s = str(self.m |
|
152 | s = str(self.msg) | |
151 | for et, ev, etb in self.elist: |
|
153 | for et, ev, etb in self.elist: | |
152 | engine_str = self._get_engine_str(ev) |
|
154 | engine_str = self._get_engine_str(ev) | |
153 | s = s + '\n' + engine_str + str(et.__name__) + ': ' + str(ev) |
|
155 | s = s + '\n' + engine_str + str(et.__name__) + ': ' + str(ev) | |
154 | return s |
|
156 | return s | |
155 |
|
157 | |||
156 | def print_tracebacks(self, excid=None): |
|
158 | def print_tracebacks(self, excid=None): | |
157 | if excid is None: |
|
159 | if excid is None: | |
158 | for (et,ev,etb) in self.elist: |
|
160 | for (et,ev,etb) in self.elist: |
@@ -1,27 +1,62 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
|
3 | """ | |||
|
4 | Foolscap related utilities. | |||
|
5 | """ | |||
2 |
|
6 | |||
3 | """Foolscap related utilities.""" |
|
7 | #----------------------------------------------------------------------------- | |
4 |
|
8 | # Copyright (C) 2008-2009 The IPython Development Team | ||
5 | __docformat__ = "restructuredtext en" |
|
|||
6 |
|
||||
7 | #------------------------------------------------------------------------------- |
|
|||
8 | # Copyright (C) 2008 The IPython Development Team |
|
|||
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING, distributed as part of this software. |
|
11 | # the file COPYING, distributed as part of this software. | |
12 |
#----------------------------------------------------------------------------- |
|
12 | #----------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 |
#----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 |
#----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
|
17 | ||||
|
18 | from __future__ import with_statement | |||
17 |
|
19 | |||
18 | import os |
|
20 | import os | |
|
21 | import tempfile | |||
|
22 | ||||
|
23 | from twisted.internet import reactor, defer | |||
|
24 | from twisted.python import log | |||
19 |
|
25 | |||
20 | from foolscap import Tub, UnauthenticatedTub |
|
26 | from foolscap import Tub, UnauthenticatedTub | |
21 |
|
27 | |||
|
28 | from IPython.config.loader import Config | |||
|
29 | ||||
|
30 | from IPython.kernel.configobjfactory import AdaptedConfiguredObjectFactory | |||
|
31 | ||||
|
32 | from IPython.kernel.error import SecurityError | |||
|
33 | ||||
|
34 | from IPython.utils.traitlets import Int, Str, Bool, Instance | |||
|
35 | from IPython.utils.importstring import import_item | |||
|
36 | ||||
|
37 | #----------------------------------------------------------------------------- | |||
|
38 | # Code | |||
|
39 | #----------------------------------------------------------------------------- | |||
|
40 | ||||
|
41 | ||||
|
42 | # We do this so if a user doesn't have OpenSSL installed, it will try to use | |||
|
43 | # an UnauthenticatedTub. But, they will still run into problems if they | |||
|
44 | # try to use encrypted furls. | |||
|
45 | try: | |||
|
46 | import OpenSSL | |||
|
47 | except: | |||
|
48 | Tub = UnauthenticatedTub | |||
|
49 | have_crypto = False | |||
|
50 | else: | |||
|
51 | have_crypto = True | |||
|
52 | ||||
|
53 | ||||
|
54 | class FURLError(Exception): | |||
|
55 | pass | |||
|
56 | ||||
|
57 | ||||
22 | def check_furl_file_security(furl_file, secure): |
|
58 | def check_furl_file_security(furl_file, secure): | |
23 | """Remove the old furl_file if changing security modes.""" |
|
59 | """Remove the old furl_file if changing security modes.""" | |
24 |
|
||||
25 | if os.path.isfile(furl_file): |
|
60 | if os.path.isfile(furl_file): | |
26 | f = open(furl_file, 'r') |
|
61 | f = open(furl_file, 'r') | |
27 | oldfurl = f.read().strip() |
|
62 | oldfurl = f.read().strip() | |
@@ -29,41 +64,210 b' def check_furl_file_security(furl_file, secure):' | |||||
29 | if (oldfurl.startswith('pb://') and not secure) or (oldfurl.startswith('pbu://') and secure): |
|
64 | if (oldfurl.startswith('pb://') and not secure) or (oldfurl.startswith('pbu://') and secure): | |
30 | os.remove(furl_file) |
|
65 | os.remove(furl_file) | |
31 |
|
66 | |||
|
67 | ||||
32 | def is_secure(furl): |
|
68 | def is_secure(furl): | |
|
69 | """Is the given FURL secure or not.""" | |||
33 | if is_valid(furl): |
|
70 | if is_valid(furl): | |
34 | if furl.startswith("pb://"): |
|
71 | if furl.startswith("pb://"): | |
35 | return True |
|
72 | return True | |
36 | elif furl.startswith("pbu://"): |
|
73 | elif furl.startswith("pbu://"): | |
37 | return False |
|
74 | return False | |
38 | else: |
|
75 | else: | |
39 |
raise |
|
76 | raise FURLError("invalid FURL: %s" % furl) | |
|
77 | ||||
40 |
|
78 | |||
41 | def is_valid(furl): |
|
79 | def is_valid(furl): | |
|
80 | """Is the str a valid FURL or not.""" | |||
42 | if isinstance(furl, str): |
|
81 | if isinstance(furl, str): | |
43 | if furl.startswith("pb://") or furl.startswith("pbu://"): |
|
82 | if furl.startswith("pb://") or furl.startswith("pbu://"): | |
44 | return True |
|
83 | return True | |
45 | else: |
|
84 | else: | |
46 | return False |
|
85 | return False | |
47 |
|
86 | |||
|
87 | ||||
48 | def find_furl(furl_or_file): |
|
88 | def find_furl(furl_or_file): | |
|
89 | """Find, validate and return a FURL in a string or file.""" | |||
49 | if isinstance(furl_or_file, str): |
|
90 | if isinstance(furl_or_file, str): | |
50 | if is_valid(furl_or_file): |
|
91 | if is_valid(furl_or_file): | |
51 | return furl_or_file |
|
92 | return furl_or_file | |
52 | if os.path.isfile(furl_or_file): |
|
93 | if os.path.isfile(furl_or_file): | |
53 |
|
|
94 | with open(furl_or_file, 'r') as f: | |
|
95 | furl = f.read().strip() | |||
54 | if is_valid(furl): |
|
96 | if is_valid(furl): | |
55 | return furl |
|
97 | return furl | |
56 |
raise |
|
98 | raise FURLError("Not a valid FURL or FURL file: %s" % furl_or_file) | |
57 |
|
99 | |||
58 | # We do this so if a user doesn't have OpenSSL installed, it will try to use |
|
|||
59 | # an UnauthenticatedTub. But, they will still run into problems if they |
|
|||
60 | # try to use encrypted furls. |
|
|||
61 | try: |
|
|||
62 | import OpenSSL |
|
|||
63 | except: |
|
|||
64 | Tub = UnauthenticatedTub |
|
|||
65 | have_crypto = False |
|
|||
66 | else: |
|
|||
67 | have_crypto = True |
|
|||
68 |
|
100 | |||
|
101 | def is_valid_furl_or_file(furl_or_file): | |||
|
102 | """Validate a FURL or a FURL file. | |||
|
103 | ||||
|
104 | If ``furl_or_file`` looks like a file, we simply make sure its directory | |||
|
105 | exists and that it has a ``.furl`` file extension. We don't try to see | |||
|
106 | if the FURL file exists or to read its contents. This is useful for | |||
|
107 | cases where auto re-connection is being used. | |||
|
108 | """ | |||
|
109 | if isinstance(furl_or_file, str): | |||
|
110 | if is_valid(furl_or_file): | |||
|
111 | return True | |||
|
112 | if isinstance(furl_or_file, (str, unicode)): | |||
|
113 | path, furl_filename = os.path.split(furl_or_file) | |||
|
114 | if os.path.isdir(path) and furl_filename.endswith('.furl'): | |||
|
115 | return True | |||
|
116 | return False | |||
|
117 | ||||
|
118 | ||||
|
119 | def validate_furl_or_file(furl_or_file): | |||
|
120 | if not is_valid_furl_or_file(furl_or_file): | |||
|
121 | raise FURLError('Not a valid FURL or FURL file: %r' % furl_or_file) | |||
|
122 | ||||
|
123 | ||||
|
124 | def get_temp_furlfile(filename): | |||
|
125 | """Return a temporary FURL file.""" | |||
|
126 | return tempfile.mktemp(dir=os.path.dirname(filename), | |||
|
127 | prefix=os.path.basename(filename)) | |||
|
128 | ||||
|
129 | ||||
|
130 | def make_tub(ip, port, secure, cert_file): | |||
|
131 | """Create a listening tub given an ip, port, and cert_file location. | |||
|
132 | ||||
|
133 | Parameters | |||
|
134 | ---------- | |||
|
135 | ip : str | |||
|
136 | The ip address or hostname that the tub should listen on. | |||
|
137 | Empty means all interfaces. | |||
|
138 | port : int | |||
|
139 | The port that the tub should listen on. A value of 0 means | |||
|
140 | pick a random port | |||
|
141 | secure: bool | |||
|
142 | Will the connection be secure (in the Foolscap sense). | |||
|
143 | cert_file: str | |||
|
144 | A filename of a file to be used for the SSL certificate. | |||
|
145 | ||||
|
146 | Returns | |||
|
147 | ------- | |||
|
148 | A tub, listener tuple. | |||
|
149 | """ | |||
|
150 | if secure: | |||
|
151 | if have_crypto: | |||
|
152 | tub = Tub(certFile=cert_file) | |||
|
153 | else: | |||
|
154 | raise SecurityError("OpenSSL/pyOpenSSL is not available, so we " | |||
|
155 | "can't run in secure mode. Try running without " | |||
|
156 | "security using 'ipcontroller -xy'.") | |||
|
157 | else: | |||
|
158 | tub = UnauthenticatedTub() | |||
|
159 | ||||
|
160 | # Set the strport based on the ip and port and start listening | |||
|
161 | if ip == '': | |||
|
162 | strport = "tcp:%i" % port | |||
|
163 | else: | |||
|
164 | strport = "tcp:%i:interface=%s" % (port, ip) | |||
|
165 | log.msg("Starting listener with [secure=%r] on: %s" % (secure, strport)) | |||
|
166 | listener = tub.listenOn(strport) | |||
|
167 | ||||
|
168 | return tub, listener | |||
|
169 | ||||
|
170 | ||||
|
171 | class FCServiceFactory(AdaptedConfiguredObjectFactory): | |||
|
172 | """This class creates a tub with various services running in it. | |||
|
173 | ||||
|
174 | The basic idea is that :meth:`create` returns a running :class:`Tub` | |||
|
175 | instance that has a number of Foolscap references registered in it. | |||
|
176 | This class is a subclass of :class:`IPython.core.component.Component` | |||
|
177 | so the IPython configuration and component system are used. | |||
|
178 | ||||
|
179 | Attributes | |||
|
180 | ---------- | |||
|
181 | interfaces : Config | |||
|
182 | A Config instance whose values are sub-Config objects having two | |||
|
183 | keys: furl_file and interface_chain. | |||
|
184 | ||||
|
185 | The other attributes are the standard ones for Foolscap. | |||
|
186 | """ | |||
|
187 | ||||
|
188 | ip = Str('', config=True) | |||
|
189 | port = Int(0, config=True) | |||
|
190 | secure = Bool(True, config=True) | |||
|
191 | cert_file = Str('', config=True) | |||
|
192 | location = Str('', config=True) | |||
|
193 | reuse_furls = Bool(False, config=True) | |||
|
194 | interfaces = Instance(klass=Config, kw={}, allow_none=False, config=True) | |||
|
195 | ||||
|
196 | def __init__(self, config, adaptee): | |||
|
197 | super(FCServiceFactory, self).__init__(config, adaptee) | |||
|
198 | self._check_reuse_furls() | |||
|
199 | ||||
|
200 | def _ip_changed(self, name, old, new): | |||
|
201 | if new == 'localhost' or new == '127.0.0.1': | |||
|
202 | self.location = '127.0.0.1' | |||
|
203 | ||||
|
204 | def _check_reuse_furls(self): | |||
|
205 | furl_files = [i.furl_file for i in self.interfaces.values()] | |||
|
206 | for ff in furl_files: | |||
|
207 | fullfile = self._get_security_file(ff) | |||
|
208 | if self.reuse_furls: | |||
|
209 | if self.port==0: | |||
|
210 | raise FURLError("You are trying to reuse the FURL file " | |||
|
211 | "for this connection, but the port for this connection " | |||
|
212 | "is set to 0 (autoselect). To reuse the FURL file " | |||
|
213 | "you need to specify specific port to listen on." | |||
|
214 | ) | |||
|
215 | else: | |||
|
216 | log.msg("Reusing FURL file: %s" % fullfile) | |||
|
217 | else: | |||
|
218 | if os.path.isfile(fullfile): | |||
|
219 | log.msg("Removing old FURL file: %s" % fullfile) | |||
|
220 | os.remove(fullfile) | |||
|
221 | ||||
|
222 | def _get_security_file(self, filename): | |||
|
223 | return os.path.join(self.config.Global.security_dir, filename) | |||
|
224 | ||||
|
225 | def create(self): | |||
|
226 | """Create and return the Foolscap tub with everything running.""" | |||
|
227 | ||||
|
228 | self.tub, self.listener = make_tub( | |||
|
229 | self.ip, self.port, self.secure, | |||
|
230 | self._get_security_file(self.cert_file) | |||
|
231 | ) | |||
|
232 | # log.msg("Interfaces to register [%r]: %r" % \ | |||
|
233 | # (self.__class__, self.interfaces)) | |||
|
234 | if not self.secure: | |||
|
235 | log.msg("WARNING: running with no security: %s" % \ | |||
|
236 | self.__class__.__name__) | |||
|
237 | reactor.callWhenRunning(self.set_location_and_register) | |||
|
238 | return self.tub | |||
|
239 | ||||
|
240 | def set_location_and_register(self): | |||
|
241 | """Set the location for the tub and return a deferred.""" | |||
|
242 | ||||
|
243 | if self.location == '': | |||
|
244 | d = self.tub.setLocationAutomatically() | |||
|
245 | else: | |||
|
246 | d = defer.maybeDeferred(self.tub.setLocation, | |||
|
247 | "%s:%i" % (self.location, self.listener.getPortnum())) | |||
|
248 | self.adapt_to_interfaces(d) | |||
|
249 | ||||
|
250 | def adapt_to_interfaces(self, d): | |||
|
251 | """Run through the interfaces, adapt and register.""" | |||
|
252 | ||||
|
253 | for ifname, ifconfig in self.interfaces.iteritems(): | |||
|
254 | ff = self._get_security_file(ifconfig.furl_file) | |||
|
255 | log.msg("Adapting [%s] to interface: %s" % \ | |||
|
256 | (self.adaptee.__class__.__name__, ifname)) | |||
|
257 | log.msg("Saving FURL for interface [%s] to file: %s" % (ifname, ff)) | |||
|
258 | check_furl_file_security(ff, self.secure) | |||
|
259 | adaptee = self.adaptee | |||
|
260 | for i in ifconfig.interface_chain: | |||
|
261 | adaptee = import_item(i)(adaptee) | |||
|
262 | d.addCallback(self.register, adaptee, furl_file=ff) | |||
|
263 | ||||
|
264 | def register(self, empty, ref, furl_file): | |||
|
265 | """Register the reference with the FURL file. | |||
|
266 | ||||
|
267 | The FURL file is created and then moved to make sure that when the | |||
|
268 | file appears, the buffer has been flushed and the file closed. | |||
|
269 | """ | |||
|
270 | temp_furl_file = get_temp_furlfile(furl_file) | |||
|
271 | self.tub.registerReference(ref, furlFile=temp_furl_file) | |||
|
272 | os.rename(temp_furl_file, furl_file) | |||
69 |
|
273 |
@@ -262,9 +262,8 b' class MultiEngine(ControllerAdapterBase):' | |||||
262 | elif targets == 'all': |
|
262 | elif targets == 'all': | |
263 | eList = self.engines.values() |
|
263 | eList = self.engines.values() | |
264 | if len(eList) == 0: |
|
264 | if len(eList) == 0: | |
265 |
|
|
265 | raise error.NoEnginesRegistered("There are no engines registered. " | |
266 |
|
|
266 | "Check the logs if you think there should have been.") | |
267 | raise error.NoEnginesRegistered(msg) |
|
|||
268 | else: |
|
267 | else: | |
269 | return eList |
|
268 | return eList | |
270 | else: |
|
269 | else: |
@@ -263,10 +263,18 b' class InteractiveMultiEngineClient(object):' | |||||
263 | """ |
|
263 | """ | |
264 |
|
264 | |||
265 | try: |
|
265 | try: | |
266 | __IPYTHON__.activeController = self |
|
266 | # This is injected into __builtins__. | |
|
267 | ip = get_ipython() | |||
267 | except NameError: |
|
268 | except NameError: | |
268 |
print "The IPython |
|
269 | print "The IPython parallel magics (%result, %px, %autopx) only work within IPython." | |
269 |
|
270 | else: | ||
|
271 | pmagic = ip.get_component('parallel_magic') | |||
|
272 | if pmagic is not None: | |||
|
273 | pmagic.active_multiengine_client = self | |||
|
274 | else: | |||
|
275 | print "You must first load the parallelmagic extension " \ | |||
|
276 | "by doing '%load_ext parallelmagic'" | |||
|
277 | ||||
270 | def __setitem__(self, key, value): |
|
278 | def __setitem__(self, key, value): | |
271 | """Add a dictionary interface for pushing/pulling. |
|
279 | """Add a dictionary interface for pushing/pulling. | |
272 |
|
280 |
@@ -1,22 +1,18 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | """ipcluster script""" |
|
4 | #----------------------------------------------------------------------------- | |
5 |
|
5 | # Copyright (C) 2008-2009 The IPython Development Team | ||
6 | __docformat__ = "restructuredtext en" |
|
|||
7 |
|
||||
8 | #------------------------------------------------------------------------------- |
|
|||
9 | # Copyright (C) 2008 The IPython Development Team |
|
|||
10 | # |
|
6 | # | |
11 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | # Distributed under the terms of the BSD License. The full license is in | |
12 | # the file COPYING, distributed as part of this software. |
|
8 | # the file COPYING, distributed as part of this software. | |
13 |
#----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
14 |
|
10 | |||
15 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
16 | # Imports |
|
12 | # Imports | |
17 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
|
14 | ||||
18 |
|
15 | |||
19 | if __name__ == '__main__': |
|
16 | from IPython.kernel.ipclusterapp import launch_new_instance | |
20 | from IPython.kernel.scripts import ipcluster |
|
|||
21 | ipcluster.main() |
|
|||
22 |
|
17 | |||
|
18 | launch_new_instance() |
@@ -1,20 +1,18 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | __docformat__ = "restructuredtext en" |
|
4 | #----------------------------------------------------------------------------- | |
5 |
|
5 | # Copyright (C) 2008-2009 The IPython Development Team | ||
6 | #------------------------------------------------------------------------------- |
|
|||
7 | # Copyright (C) 2008 The IPython Development Team |
|
|||
8 | # |
|
6 | # | |
9 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | # Distributed under the terms of the BSD License. The full license is in | |
10 | # the file COPYING, distributed as part of this software. |
|
8 | # the file COPYING, distributed as part of this software. | |
11 |
#----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
12 |
|
10 | |||
13 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
12 | # Imports | |
15 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
|
14 | ||||
16 |
|
15 | |||
17 | if __name__ == '__main__': |
|
16 | from IPython.kernel.ipcontrollerapp import launch_new_instance | |
18 | from IPython.kernel.scripts import ipcontroller |
|
|||
19 | ipcontroller.main() |
|
|||
20 |
|
17 | |||
|
18 | launch_new_instance() |
@@ -1,20 +1,20 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 |
|
3 | |||
4 | __docformat__ = "restructuredtext en" |
|
4 | #----------------------------------------------------------------------------- | |
5 |
|
5 | # Copyright (C) 2008-2009 The IPython Development Team | ||
6 | #------------------------------------------------------------------------------- |
|
|||
7 | # Copyright (C) 2008 The IPython Development Team |
|
|||
8 | # |
|
6 | # | |
9 | # Distributed under the terms of the BSD License. The full license is in |
|
7 | # Distributed under the terms of the BSD License. The full license is in | |
10 | # the file COPYING, distributed as part of this software. |
|
8 | # the file COPYING, distributed as part of this software. | |
11 |
#----------------------------------------------------------------------------- |
|
9 | #----------------------------------------------------------------------------- | |
12 |
|
10 | |||
13 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
14 | # Imports |
|
12 | # Imports | |
15 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
|
14 | ||||
|
15 | ||||
|
16 | from IPython.kernel.ipengineapp import launch_new_instance | |||
|
17 | ||||
|
18 | launch_new_instance() | |||
16 |
|
19 | |||
17 | if __name__ == '__main__': |
|
|||
18 | from IPython.kernel.scripts import ipengine |
|
|||
19 | ipengine.main() |
|
|||
20 |
|
20 |
@@ -3,18 +3,16 b'' | |||||
3 |
|
3 | |||
4 | """Things directly related to all of twisted.""" |
|
4 | """Things directly related to all of twisted.""" | |
5 |
|
5 | |||
6 | __docformat__ = "restructuredtext en" |
|
6 | #----------------------------------------------------------------------------- | |
7 |
|
7 | # Copyright (C) 2008-2009 The IPython Development Team | ||
8 | #------------------------------------------------------------------------------- |
|
|||
9 | # Copyright (C) 2008 The IPython Development Team |
|
|||
10 | # |
|
8 | # | |
11 | # Distributed under the terms of the BSD License. The full license is in |
|
9 | # Distributed under the terms of the BSD License. The full license is in | |
12 | # the file COPYING, distributed as part of this software. |
|
10 | # the file COPYING, distributed as part of this software. | |
13 |
#----------------------------------------------------------------------------- |
|
11 | #----------------------------------------------------------------------------- | |
14 |
|
12 | |||
15 |
#----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
16 | # Imports |
|
14 | # Imports | |
17 |
#----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
18 |
|
16 | |||
19 | import os, sys |
|
17 | import os, sys | |
20 | import threading, Queue, atexit |
|
18 | import threading, Queue, atexit | |
@@ -25,9 +23,9 b' from twisted.python import log, failure' | |||||
25 |
|
23 | |||
26 | from IPython.kernel.error import FileTimeoutError |
|
24 | from IPython.kernel.error import FileTimeoutError | |
27 |
|
25 | |||
28 |
#----------------------------------------------------------------------------- |
|
26 | #----------------------------------------------------------------------------- | |
29 | # Classes related to twisted and threads |
|
27 | # Classes related to twisted and threads | |
30 |
#----------------------------------------------------------------------------- |
|
28 | #----------------------------------------------------------------------------- | |
31 |
|
29 | |||
32 |
|
30 | |||
33 | class ReactorInThread(threading.Thread): |
|
31 | class ReactorInThread(threading.Thread): | |
@@ -42,6 +40,15 b' class ReactorInThread(threading.Thread):' | |||||
42 | """ |
|
40 | """ | |
43 |
|
41 | |||
44 | def run(self): |
|
42 | def run(self): | |
|
43 | """Run the twisted reactor in a thread. | |||
|
44 | ||||
|
45 | This runs the reactor with installSignalHandlers=0, which prevents | |||
|
46 | twisted from installing any of its own signal handlers. This needs to | |||
|
47 | be disabled because signal.signal can't be called in a thread. The | |||
|
48 | only problem with this is that SIGCHLD events won't be detected so | |||
|
49 | spawnProcess won't detect that its processes have been killed by | |||
|
50 | an external factor. | |||
|
51 | """ | |||
45 | reactor.run(installSignalHandlers=0) |
|
52 | reactor.run(installSignalHandlers=0) | |
46 | # self.join() |
|
53 | # self.join() | |
47 |
|
54 | |||
@@ -247,3 +254,21 b' def wait_for_file(filename, delay=0.1, max_tries=10):' | |||||
247 |
|
254 | |||
248 | _test_for_file(filename) |
|
255 | _test_for_file(filename) | |
249 | return d |
|
256 | return d | |
|
257 | ||||
|
258 | ||||
|
259 | def sleep_deferred(seconds): | |||
|
260 | """Sleep without blocking the event loop.""" | |||
|
261 | d = defer.Deferred() | |||
|
262 | reactor.callLater(seconds, d.callback, seconds) | |||
|
263 | return d | |||
|
264 | ||||
|
265 | ||||
|
266 | def make_deferred(func): | |||
|
267 | """A decorator that calls a function with :func`maybeDeferred`.""" | |||
|
268 | ||||
|
269 | def _wrapper(*args, **kwargs): | |||
|
270 | return defer.maybeDeferred(func, *args, **kwargs) | |||
|
271 | ||||
|
272 | return _wrapper | |||
|
273 | ||||
|
274 |
@@ -51,19 +51,22 b' def inputhook_wx1():' | |||||
51 | This approach seems to work, but its performance is not great as it |
|
51 | This approach seems to work, but its performance is not great as it | |
52 | relies on having PyOS_InputHook called regularly. |
|
52 | relies on having PyOS_InputHook called regularly. | |
53 | """ |
|
53 | """ | |
54 | app = wx.GetApp() |
|
54 | try: | |
55 | if app is not None: |
|
55 | app = wx.GetApp() | |
56 | assert wx.Thread_IsMain() |
|
56 | if app is not None: | |
57 |
|
57 | assert wx.Thread_IsMain() | ||
58 | # Make a temporary event loop and process system events until |
|
58 | ||
59 | # there are no more waiting, then allow idle events (which |
|
59 | # Make a temporary event loop and process system events until | |
60 | # will also deal with pending or posted wx events.) |
|
60 | # there are no more waiting, then allow idle events (which | |
61 | evtloop = wx.EventLoop() |
|
61 | # will also deal with pending or posted wx events.) | |
62 |
|
|
62 | evtloop = wx.EventLoop() | |
63 | while evtloop.Pending(): |
|
63 | ea = wx.EventLoopActivator(evtloop) | |
64 |
evtloop. |
|
64 | while evtloop.Pending(): | |
65 | app.ProcessIdle() |
|
65 | evtloop.Dispatch() | |
66 | del ea |
|
66 | app.ProcessIdle() | |
|
67 | del ea | |||
|
68 | except KeyboardInterrupt: | |||
|
69 | pass | |||
67 | return 0 |
|
70 | return 0 | |
68 |
|
71 | |||
69 | class EventLoopTimer(wx.Timer): |
|
72 | class EventLoopTimer(wx.Timer): | |
@@ -102,13 +105,16 b' def inputhook_wx2():' | |||||
102 | but eventually performance would suffer from calling select/kbhit too |
|
105 | but eventually performance would suffer from calling select/kbhit too | |
103 | often. |
|
106 | often. | |
104 | """ |
|
107 | """ | |
105 | app = wx.GetApp() |
|
108 | try: | |
106 | if app is not None: |
|
109 | app = wx.GetApp() | |
107 | assert wx.Thread_IsMain() |
|
110 | if app is not None: | |
108 | elr = EventLoopRunner() |
|
111 | assert wx.Thread_IsMain() | |
109 | # As this time is made shorter, keyboard response improves, but idle |
|
112 | elr = EventLoopRunner() | |
110 | # CPU load goes up. 10 ms seems like a good compromise. |
|
113 | # As this time is made shorter, keyboard response improves, but idle | |
111 | elr.Run(time=10) # CHANGE time here to control polling interval |
|
114 | # CPU load goes up. 10 ms seems like a good compromise. | |
|
115 | elr.Run(time=10) # CHANGE time here to control polling interval | |||
|
116 | except KeyboardInterrupt: | |||
|
117 | pass | |||
112 | return 0 |
|
118 | return 0 | |
113 |
|
119 | |||
114 | def inputhook_wx3(): |
|
120 | def inputhook_wx3(): | |
@@ -119,49 +125,54 b' def inputhook_wx3():' | |||||
119 | time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. |
|
125 | time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. | |
120 | This sleep time should be tuned though for best performance. |
|
126 | This sleep time should be tuned though for best performance. | |
121 | """ |
|
127 | """ | |
122 | app = wx.GetApp() |
|
128 | # We need to protect against a user pressing Control-C when IPython is | |
123 | if app is not None: |
|
129 | # idle and this is running. We trap KeyboardInterrupt and pass. | |
124 | assert wx.Thread_IsMain() |
|
130 | try: | |
125 |
|
131 | app = wx.GetApp() | ||
126 | # The import of wx on Linux sets the handler for signal.SIGINT |
|
132 | if app is not None: | |
127 | # to 0. This is a bug in wx or gtk. We fix by just setting it |
|
133 | assert wx.Thread_IsMain() | |
128 | # back to the Python default. |
|
134 | ||
129 | if not callable(signal.getsignal(signal.SIGINT)): |
|
135 | # The import of wx on Linux sets the handler for signal.SIGINT | |
130 | signal.signal(signal.SIGINT, signal.default_int_handler) |
|
136 | # to 0. This is a bug in wx or gtk. We fix by just setting it | |
131 |
|
137 | # back to the Python default. | ||
132 | evtloop = wx.EventLoop() |
|
138 | if not callable(signal.getsignal(signal.SIGINT)): | |
133 | ea = wx.EventLoopActivator(evtloop) |
|
139 | signal.signal(signal.SIGINT, signal.default_int_handler) | |
134 | t = clock() |
|
140 | ||
135 | while not stdin_ready(): |
|
141 | evtloop = wx.EventLoop() | |
136 | while evtloop.Pending(): |
|
142 | ea = wx.EventLoopActivator(evtloop) | |
137 |
|
|
143 | t = clock() | |
138 | evtloop.Dispatch() |
|
144 | while not stdin_ready(): | |
139 | app.ProcessIdle() |
|
145 | while evtloop.Pending(): | |
140 | # We need to sleep at this point to keep the idle CPU load |
|
146 | t = clock() | |
141 | # low. However, if sleep to long, GUI response is poor. As |
|
147 | evtloop.Dispatch() | |
142 | # a compromise, we watch how often GUI events are being processed |
|
148 | app.ProcessIdle() | |
143 | # and switch between a short and long sleep time. Here are some |
|
149 | # We need to sleep at this point to keep the idle CPU load | |
144 | # stats useful in helping to tune this. |
|
150 | # low. However, if sleep to long, GUI response is poor. As | |
145 | # time CPU load |
|
151 | # a compromise, we watch how often GUI events are being processed | |
146 | # 0.001 13% |
|
152 | # and switch between a short and long sleep time. Here are some | |
147 | # 0.005 3% |
|
153 | # stats useful in helping to tune this. | |
148 |
|
|
154 | # time CPU load | |
149 |
|
|
155 | # 0.001 13% | |
150 | used_time = clock() - t |
|
156 | # 0.005 3% | |
151 | if used_time > 5*60.0: |
|
157 | # 0.01 1.5% | |
152 |
# |
|
158 | # 0.05 0.5% | |
153 |
|
|
159 | used_time = clock() - t | |
154 |
|
|
160 | if used_time > 5*60.0: | |
155 |
# print 'Sleep for |
|
161 | # print 'Sleep for 5 s' # dbg | |
156 |
time.sleep( |
|
162 | time.sleep(5.0) | |
157 |
elif used_time > |
|
163 | elif used_time > 10.0: | |
158 | # Few GUI events coming in, so we can sleep longer |
|
164 | # print 'Sleep for 1 s' # dbg | |
159 | # print 'Sleep for 0.05 s' # dbg |
|
165 | time.sleep(1.0) | |
160 |
|
|
166 | elif used_time > 0.1: | |
161 | else: |
|
167 | # Few GUI events coming in, so we can sleep longer | |
162 | # Many GUI events coming in, so sleep only very little |
|
168 | # print 'Sleep for 0.05 s' # dbg | |
163 |
time.sleep(0.0 |
|
169 | time.sleep(0.05) | |
164 |
|
|
170 | else: | |
|
171 | # Many GUI events coming in, so sleep only very little | |||
|
172 | time.sleep(0.001) | |||
|
173 | del ea | |||
|
174 | except KeyboardInterrupt: | |||
|
175 | pass | |||
165 | return 0 |
|
176 | return 0 | |
166 |
|
177 | |||
167 | # This is our default implementation |
|
178 | # This is our default implementation |
@@ -303,11 +303,11 b' class IPythonRunner(InteractiveRunner):' | |||||
303 | def __init__(self,program = 'ipython',args=None,out=sys.stdout,echo=True): |
|
303 | def __init__(self,program = 'ipython',args=None,out=sys.stdout,echo=True): | |
304 | """New runner, optionally passing the ipython command to use.""" |
|
304 | """New runner, optionally passing the ipython command to use.""" | |
305 |
|
305 | |||
306 | args0 = ['-colors','NoColor', |
|
306 | args0 = ['--colors','NoColor', | |
307 | '-pi1','In [\\#]: ', |
|
307 | '-pi1','In [\\#]: ', | |
308 | '-pi2',' .\\D.: ', |
|
308 | '-pi2',' .\\D.: ', | |
309 |
'-noterm |
|
309 | '--noterm-title', | |
310 | '-noautoindent'] |
|
310 | '--no-auto-indent'] | |
311 | if args is None: args = args0 |
|
311 | if args is None: args = args0 | |
312 | else: args = args0 + args |
|
312 | else: args = args0 + args | |
313 | prompts = [r'In \[\d+\]: ',r' \.*: '] |
|
313 | prompts = [r'In \[\d+\]: ',r' \.*: '] |
@@ -14,7 +14,7 b' executed.' | |||||
14 | # the file COPYING, distributed as part of this software. |
|
14 | # the file COPYING, distributed as part of this software. | |
15 | #***************************************************************************** |
|
15 | #***************************************************************************** | |
16 |
|
16 | |||
17 | # TODO: deprecated |
|
17 | ||
18 | def prefilter_shell(self,line,continuation): |
|
18 | def prefilter_shell(self,line,continuation): | |
19 | """Alternate prefilter, modified for shell-like functionality. |
|
19 | """Alternate prefilter, modified for shell-like functionality. | |
20 |
|
20 |
@@ -141,7 +141,7 b' def collect(ip,arg):' | |||||
141 | Without args, try to open ~/_ipython/collect dir (in win32 at least). |
|
141 | Without args, try to open ~/_ipython/collect dir (in win32 at least). | |
142 | """ |
|
142 | """ | |
143 | from IPython.external.path import path |
|
143 | from IPython.external.path import path | |
144 |
basedir = path(ip. |
|
144 | basedir = path(ip.ipython_dir + '/collect') | |
145 | try: |
|
145 | try: | |
146 | fs = mglob.expand(arg.split(None,1)[1]) |
|
146 | fs = mglob.expand(arg.split(None,1)[1]) | |
147 | except IndexError: |
|
147 | except IndexError: | |
@@ -170,7 +170,7 b' def inote(ip,arg):' | |||||
170 | Without args, opens notes.txt for editing. |
|
170 | Without args, opens notes.txt for editing. | |
171 | """ |
|
171 | """ | |
172 | import time |
|
172 | import time | |
173 |
fname = ip. |
|
173 | fname = ip.ipython_dir + '/notes.txt' | |
174 |
|
174 | |||
175 | try: |
|
175 | try: | |
176 | entry = " === " + time.asctime() + ': ===\n' + arg.split(None,1)[1] + '\n' |
|
176 | entry = " === " + time.asctime() + ': ===\n' + arg.split(None,1)[1] + '\n' |
@@ -559,9 +559,7 b' def filefind(filename, path_dirs=None):' | |||||
559 | path_dirs = (path_dirs,) |
|
559 | path_dirs = (path_dirs,) | |
560 | for path in path_dirs: |
|
560 | for path in path_dirs: | |
561 | if path == '.': path = os.getcwd() |
|
561 | if path == '.': path = os.getcwd() | |
562 |
testname = os.path. |
|
562 | testname = expand_path(os.path.join(path, filename)) | |
563 | os.path.expanduser( |
|
|||
564 | os.path.join(path, filename))) |
|
|||
565 | if os.path.isfile(testname): |
|
563 | if os.path.isfile(testname): | |
566 | return os.path.abspath(testname) |
|
564 | return os.path.abspath(testname) | |
567 | raise IOError("File does not exist in any " |
|
565 | raise IOError("File does not exist in any " | |
@@ -717,10 +715,17 b' class HomeDirError(Error):' | |||||
717 | def get_home_dir(): |
|
715 | def get_home_dir(): | |
718 | """Return the closest possible equivalent to a 'home' directory. |
|
716 | """Return the closest possible equivalent to a 'home' directory. | |
719 |
|
717 | |||
720 | We first try $HOME. Absent that, on NT it's $HOMEDRIVE\$HOMEPATH. |
|
718 | * On POSIX, we try $HOME. | |
721 |
|
719 | * On Windows we try: | ||
|
720 | - %HOMESHARE% | |||
|
721 | - %HOMEDRIVE\%HOMEPATH% | |||
|
722 | - %USERPROFILE% | |||
|
723 | - Registry hack | |||
|
724 | * On Dos C:\ | |||
|
725 | ||||
722 | Currently only Posix and NT are implemented, a HomeDirError exception is |
|
726 | Currently only Posix and NT are implemented, a HomeDirError exception is | |
723 |
raised for all other OSes. |
|
727 | raised for all other OSes. | |
|
728 | """ | |||
724 |
|
729 | |||
725 | isdir = os.path.isdir |
|
730 | isdir = os.path.isdir | |
726 | env = os.environ |
|
731 | env = os.environ | |
@@ -736,93 +741,88 b' def get_home_dir():' | |||||
736 | root=os.path.abspath(root).rstrip('\\') |
|
741 | root=os.path.abspath(root).rstrip('\\') | |
737 | if isdir(os.path.join(root, '_ipython')): |
|
742 | if isdir(os.path.join(root, '_ipython')): | |
738 | os.environ["IPYKITROOT"] = root |
|
743 | os.environ["IPYKITROOT"] = root | |
739 | return root |
|
744 | return root.decode(sys.getfilesystemencoding()) | |
740 | try: |
|
745 | ||
741 | homedir = env['HOME'] |
|
746 | if os.name == 'posix': | |
742 | if not isdir(homedir): |
|
747 | # Linux, Unix, AIX, OS X | |
743 | # in case a user stuck some string which does NOT resolve to a |
|
748 | try: | |
744 | # valid path, it's as good as if we hadn't foud it |
|
749 | homedir = env['HOME'] | |
745 |
|
|
750 | except KeyError: | |
746 | return homedir |
|
751 | raise HomeDirError('Undefined $HOME, IPython cannot proceed.') | |
747 | except KeyError: |
|
752 | else: | |
748 | if os.name == 'posix': |
|
753 | return homedir.decode(sys.getfilesystemencoding()) | |
749 | raise HomeDirError,'undefined $HOME, IPython can not proceed.' |
|
754 | elif os.name == 'nt': | |
750 | elif os.name == 'nt': |
|
755 | # Now for win9x, XP, Vista, 7? | |
751 |
|
|
756 | # For some strange reason all of these return 'nt' for os.name. | |
752 | try: |
|
757 | # First look for a network home directory. This will return the UNC | |
753 | homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH']) |
|
758 | # path (\\server\\Users\%username%) not the mapped path (Z:\). This | |
754 | if not isdir(homedir): |
|
759 | # is needed when running IPython on cluster where all paths have to | |
755 | homedir = os.path.join(env['USERPROFILE']) |
|
760 | # be UNC. | |
756 | if not isdir(homedir): |
|
761 | try: | |
757 | raise HomeDirError |
|
762 | homedir = env['HOMESHARE'] | |
758 | return homedir |
|
763 | except KeyError: | |
759 | except KeyError: |
|
764 | pass | |
760 | try: |
|
765 | else: | |
761 | # Use the registry to get the 'My Documents' folder. |
|
766 | if isdir(homedir): | |
762 | import _winreg as wreg |
|
767 | return homedir.decode(sys.getfilesystemencoding()) | |
763 | key = wreg.OpenKey(wreg.HKEY_CURRENT_USER, |
|
768 | ||
764 | "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") |
|
769 | # Now look for a local home directory | |
765 | homedir = wreg.QueryValueEx(key,'Personal')[0] |
|
770 | try: | |
766 | key.Close() |
|
771 | homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH']) | |
767 | if not isdir(homedir): |
|
772 | except KeyError: | |
768 | e = ('Invalid "Personal" folder registry key ' |
|
773 | pass | |
769 | 'typically "My Documents".\n' |
|
774 | else: | |
770 | 'Value: %s\n' |
|
775 | if isdir(homedir): | |
771 | 'This is not a valid directory on your system.' % |
|
776 | return homedir.decode(sys.getfilesystemencoding()) | |
772 | homedir) |
|
777 | ||
773 | raise HomeDirError(e) |
|
778 | # Now the users profile directory | |
774 | return homedir |
|
779 | try: | |
775 | except HomeDirError: |
|
780 | homedir = os.path.join(env['USERPROFILE']) | |
776 | raise |
|
781 | except KeyError: | |
777 |
|
|
782 | pass | |
778 | return 'C:\\' |
|
|||
779 | elif os.name == 'dos': |
|
|||
780 | # Desperate, may do absurd things in classic MacOS. May work under DOS. |
|
|||
781 | return 'C:\\' |
|
|||
782 | else: |
|
783 | else: | |
783 | raise HomeDirError,'support for your operating system not implemented.' |
|
784 | if isdir(homedir): | |
|
785 | return homedir.decode(sys.getfilesystemencoding()) | |||
|
786 | ||||
|
787 | # Use the registry to get the 'My Documents' folder. | |||
|
788 | try: | |||
|
789 | import _winreg as wreg | |||
|
790 | key = wreg.OpenKey( | |||
|
791 | wreg.HKEY_CURRENT_USER, | |||
|
792 | "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" | |||
|
793 | ) | |||
|
794 | homedir = wreg.QueryValueEx(key,'Personal')[0] | |||
|
795 | key.Close() | |||
|
796 | except: | |||
|
797 | pass | |||
|
798 | else: | |||
|
799 | if isdir(homedir): | |||
|
800 | return homedir.decode(sys.getfilesystemencoding()) | |||
|
801 | ||||
|
802 | # If all else fails, raise HomeDirError | |||
|
803 | raise HomeDirError('No valid home directory could be found') | |||
|
804 | elif os.name == 'dos': | |||
|
805 | # Desperate, may do absurd things in classic MacOS. May work under DOS. | |||
|
806 | return 'C:\\'.decode(sys.getfilesystemencoding()) | |||
|
807 | else: | |||
|
808 | raise HomeDirError('No valid home directory could be found for your OS') | |||
784 |
|
809 | |||
785 |
|
810 | |||
786 | def get_ipython_dir(): |
|
811 | def get_ipython_dir(): | |
787 | """Get the IPython directory for this platform and user. |
|
812 | """Get the IPython directory for this platform and user. | |
788 |
|
813 | |||
789 | This uses the logic in `get_home_dir` to find the home directory |
|
814 | This uses the logic in `get_home_dir` to find the home directory | |
790 |
and the adds |
|
815 | and the adds .ipython to the end of the path. | |
791 | """ |
|
816 | """ | |
792 | if os.name == 'posix': |
|
817 | ipdir_def = '.ipython' | |
793 | ipdir_def = '.ipython' |
|
|||
794 | else: |
|
|||
795 | ipdir_def = '_ipython' |
|
|||
796 | home_dir = get_home_dir() |
|
818 | home_dir = get_home_dir() | |
797 |
ipdir = os. |
|
819 | ipdir = os.environ.get( | |
798 | os.path.join(home_dir, ipdir_def))) |
|
820 | 'IPYTHON_DIR', os.environ.get( | |
|
821 | 'IPYTHONDIR', os.path.join(home_dir, ipdir_def) | |||
|
822 | ) | |||
|
823 | ) | |||
799 | return ipdir.decode(sys.getfilesystemencoding()) |
|
824 | return ipdir.decode(sys.getfilesystemencoding()) | |
800 |
|
825 | |||
801 | def get_security_dir(): |
|
|||
802 | """Get the IPython security directory. |
|
|||
803 |
|
||||
804 | This directory is the default location for all security related files, |
|
|||
805 | including SSL/TLS certificates and FURL files. |
|
|||
806 |
|
||||
807 | If the directory does not exist, it is created with 0700 permissions. |
|
|||
808 | If it exists, permissions are set to 0700. |
|
|||
809 | """ |
|
|||
810 | security_dir = os.path.join(get_ipython_dir(), 'security') |
|
|||
811 | if not os.path.isdir(security_dir): |
|
|||
812 | os.mkdir(security_dir, 0700) |
|
|||
813 | else: |
|
|||
814 | os.chmod(security_dir, 0700) |
|
|||
815 | return security_dir |
|
|||
816 |
|
||||
817 | def get_log_dir(): |
|
|||
818 | """Get the IPython log directory. |
|
|||
819 |
|
||||
820 | If the log directory does not exist, it is created. |
|
|||
821 | """ |
|
|||
822 | log_dir = os.path.join(get_ipython_dir(), 'log') |
|
|||
823 | if not os.path.isdir(log_dir): |
|
|||
824 | os.mkdir(log_dir, 0777) |
|
|||
825 | return log_dir |
|
|||
826 |
|
826 | |||
827 | #**************************************************************************** |
|
827 | #**************************************************************************** | |
828 | # strings and text |
|
828 | # strings and text | |
@@ -1738,7 +1738,7 b' def extract_vars_above(*names):' | |||||
1738 | callerNS = sys._getframe(2).f_locals |
|
1738 | callerNS = sys._getframe(2).f_locals | |
1739 | return dict((k,callerNS[k]) for k in names) |
|
1739 | return dict((k,callerNS[k]) for k in names) | |
1740 |
|
1740 | |||
1741 |
def |
|
1741 | def expand_path(s): | |
1742 | """Expand $VARS and ~names in a string, like a shell |
|
1742 | """Expand $VARS and ~names in a string, like a shell | |
1743 |
|
1743 | |||
1744 | :Examples: |
|
1744 | :Examples: | |
@@ -1748,8 +1748,17 b' def shexp(s):' | |||||
1748 | In [3]: shexp('variable FOO is $FOO') |
|
1748 | In [3]: shexp('variable FOO is $FOO') | |
1749 | Out[3]: 'variable FOO is test' |
|
1749 | Out[3]: 'variable FOO is test' | |
1750 | """ |
|
1750 | """ | |
1751 | return os.path.expandvars(os.path.expanduser(s)) |
|
1751 | # This is a pretty subtle hack. When expand user is given a UNC path | |
1752 |
|
1752 | # on Windows (\\server\share$\%username%), os.path.expandvars, removes | ||
|
1753 | # the $ to get (\\server\share\%username%). I think it considered $ | |||
|
1754 | # alone an empty var. But, we need the $ to remains there (it indicates | |||
|
1755 | # a hidden share). | |||
|
1756 | if os.name=='nt': | |||
|
1757 | s = s.replace('$\\', 'IPYTHON_TEMP') | |||
|
1758 | s = os.path.expandvars(os.path.expanduser(s)) | |||
|
1759 | if os.name=='nt': | |||
|
1760 | s = s.replace('IPYTHON_TEMP', '$\\') | |||
|
1761 | return s | |||
1753 |
|
1762 | |||
1754 | def list_strings(arg): |
|
1763 | def list_strings(arg): | |
1755 | """Always return a list of strings, given a string or list of strings |
|
1764 | """Always return a list of strings, given a string or list of strings |
@@ -1,119 +1,137 b'' | |||||
|
1 | #!/usr/bin/env python | |||
1 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
2 |
|
3 | """ | ||
3 |
|
|
4 | The IPython Core Notification Center. | |
4 |
|
5 | |||
5 | See docs/source/development/notification_blueprint.txt for an overview of the |
|
6 | See docs/source/development/notification_blueprint.txt for an overview of the | |
6 | notification module. |
|
7 | notification module. | |
|
8 | ||||
|
9 | Authors: | |||
|
10 | ||||
|
11 | * Barry Wark | |||
|
12 | * Brian Granger | |||
7 | """ |
|
13 | """ | |
8 |
|
14 | |||
9 | __docformat__ = "restructuredtext en" |
|
|||
10 |
|
||||
11 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
12 |
# Copyright (C) 2008 The IPython Development Team |
|
16 | # Copyright (C) 2008-2009 The IPython Development Team | |
13 | # |
|
17 | # | |
14 |
# Distributed under the terms of the BSD License. The full license is in |
|
18 | # Distributed under the terms of the BSD License. The full license is in | |
15 |
# the file COPYING, distributed as part of this software. |
|
19 | # the file COPYING, distributed as part of this software. | |
|
20 | #----------------------------------------------------------------------------- | |||
|
21 | ||||
|
22 | #----------------------------------------------------------------------------- | |||
|
23 | # Code | |||
16 | #----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
17 |
|
25 | |||
18 | # Tell nose to skip the testing of this module |
|
26 | ||
19 | __test__ = {} |
|
27 | class NotificationError(Exception): | |
|
28 | pass | |||
|
29 | ||||
20 |
|
30 | |||
21 | class NotificationCenter(object): |
|
31 | class NotificationCenter(object): | |
22 | """Synchronous notification center |
|
32 | """Synchronous notification center. | |
23 |
|
33 | |||
24 | Examples |
|
34 | Examples | |
25 | -------- |
|
35 | -------- | |
26 | >>> import IPython.kernel.core.notification as notification |
|
36 | Here is a simple example of how to use this:: | |
27 | >>> def callback(theType, theSender, args={}): |
|
37 | ||
28 | ... print theType,theSender,args |
|
38 | import IPython.kernel.core.notification as notification | |
29 | ... |
|
39 | def callback(ntype, theSender, args={}): | |
30 | >>> notification.sharedCenter.add_observer(callback, 'NOTIFICATION_TYPE', None) |
|
40 | print ntype,theSender,args | |
31 | >>> notification.sharedCenter.post_notification('NOTIFICATION_TYPE', object()) # doctest:+ELLIPSIS |
|
41 | ||
32 | NOTIFICATION_TYPE ... |
|
42 | notification.sharedCenter.add_observer(callback, 'NOTIFICATION_TYPE', None) | |
33 |
|
43 | notification.sharedCenter.post_notification('NOTIFICATION_TYPE', object()) # doctest:+ELLIPSIS | ||
|
44 | NOTIFICATION_TYPE ... | |||
34 | """ |
|
45 | """ | |
35 | def __init__(self): |
|
46 | def __init__(self): | |
36 | super(NotificationCenter, self).__init__() |
|
47 | super(NotificationCenter, self).__init__() | |
37 | self._init_observers() |
|
48 | self._init_observers() | |
38 |
|
49 | |||
39 |
|
||||
40 | def _init_observers(self): |
|
50 | def _init_observers(self): | |
41 | """Initialize observer storage""" |
|
51 | """Initialize observer storage""" | |
42 |
|
52 | |||
43 | self.registered_types = set() #set of types that are observed |
|
53 | self.registered_types = set() #set of types that are observed | |
44 | self.registered_senders = set() #set of senders that are observed |
|
54 | self.registered_senders = set() #set of senders that are observed | |
45 | self.observers = {} #map (type,sender) => callback (callable) |
|
55 | self.observers = {} #map (type,sender) => callback (callable) | |
46 |
|
||||
47 |
|
||||
48 | def post_notification(self, theType, sender, **kwargs): |
|
|||
49 | """Post notification (type,sender,**kwargs) to all registered |
|
|||
50 | observers. |
|
|||
51 |
|
56 | |||
52 | Implementation notes: |
|
57 | def post_notification(self, ntype, sender, *args, **kwargs): | |
|
58 | """Post notification to all registered observers. | |||
|
59 | ||||
|
60 | The registered callback will be called as:: | |||
|
61 | ||||
|
62 | callback(ntype, sender, *args, **kwargs) | |||
|
63 | ||||
|
64 | Parameters | |||
|
65 | ---------- | |||
|
66 | ntype : hashable | |||
|
67 | The notification type. | |||
|
68 | sender : hashable | |||
|
69 | The object sending the notification. | |||
|
70 | *args : tuple | |||
|
71 | The positional arguments to be passed to the callback. | |||
|
72 | **kwargs : dict | |||
|
73 | The keyword argument to be passed to the callback. | |||
53 |
|
74 | |||
|
75 | Notes | |||
|
76 | ----- | |||
54 | * If no registered observers, performance is O(1). |
|
77 | * If no registered observers, performance is O(1). | |
55 | * Notificaiton order is undefined. |
|
78 | * Notificaiton order is undefined. | |
56 | * Notifications are posted synchronously. |
|
79 | * Notifications are posted synchronously. | |
57 | """ |
|
80 | """ | |
58 |
|
81 | |||
59 |
if(t |
|
82 | if(ntype==None or sender==None): | |
60 | raise Exception("NotificationCenter.post_notification requires \ |
|
83 | raise NotificationError( | |
61 | type and sender.") |
|
84 | "Notification type and sender are required.") | |
62 |
|
85 | |||
63 | # If there are no registered observers for the type/sender pair |
|
86 | # If there are no registered observers for the type/sender pair | |
64 |
if((t |
|
87 | if((ntype not in self.registered_types and | |
65 | None not in self.registered_types) or |
|
88 | None not in self.registered_types) or | |
66 | (sender not in self.registered_senders and |
|
89 | (sender not in self.registered_senders and | |
67 | None not in self.registered_senders)): |
|
90 | None not in self.registered_senders)): | |
68 | return |
|
91 | return | |
69 |
|
92 | |||
70 |
for o in self._observers_for_notification(t |
|
93 | for o in self._observers_for_notification(ntype, sender): | |
71 |
o(t |
|
94 | o(ntype, sender, *args, **kwargs) | |
72 |
|
95 | |||
73 |
|
96 | def _observers_for_notification(self, ntype, sender): | ||
74 | def _observers_for_notification(self, theType, sender): |
|
|||
75 | """Find all registered observers that should recieve notification""" |
|
97 | """Find all registered observers that should recieve notification""" | |
76 |
|
98 | |||
77 | keys = ( |
|
99 | keys = ( | |
78 |
|
|
100 | (ntype,sender), | |
79 |
|
|
101 | (ntype, None), | |
80 |
|
|
102 | (None, sender), | |
81 |
|
|
103 | (None,None) | |
82 |
|
|
104 | ) | |
83 |
|
105 | |||
84 |
|
||||
85 | obs = set() |
|
106 | obs = set() | |
86 | for k in keys: |
|
107 | for k in keys: | |
87 | obs.update(self.observers.get(k, set())) |
|
108 | obs.update(self.observers.get(k, set())) | |
88 |
|
109 | |||
89 | return obs |
|
110 | return obs | |
90 |
|
111 | |||
91 |
|
112 | def add_observer(self, callback, ntype, sender): | ||
92 | def add_observer(self, callback, theType, sender): |
|
|||
93 | """Add an observer callback to this notification center. |
|
113 | """Add an observer callback to this notification center. | |
94 |
|
114 | |||
95 | The given callback will be called upon posting of notifications of |
|
115 | The given callback will be called upon posting of notifications of | |
96 |
the given type/sender and will receive any additional |
|
116 | the given type/sender and will receive any additional arguments passed | |
97 | to post_notification. |
|
117 | to post_notification. | |
98 |
|
118 | |||
99 | Parameters |
|
119 | Parameters | |
100 | ---------- |
|
120 | ---------- | |
101 |
|
|
121 | callback : callable | |
102 | Callable. Must take at least two arguments:: |
|
122 | The callable that will be called by :meth:`post_notification` | |
103 |
|
|
123 | as ``callback(ntype, sender, *args, **kwargs) | |
104 |
|
124 | ntype : hashable | ||
105 | theType : hashable |
|
|||
106 | The notification type. If None, all notifications from sender |
|
125 | The notification type. If None, all notifications from sender | |
107 | will be posted. |
|
126 | will be posted. | |
108 |
|
||||
109 | sender : hashable |
|
127 | sender : hashable | |
110 |
The notification sender. If None, all notifications of t |
|
128 | The notification sender. If None, all notifications of ntype | |
111 | will be posted. |
|
129 | will be posted. | |
112 | """ |
|
130 | """ | |
113 | assert(callback != None) |
|
131 | assert(callback != None) | |
114 |
self.registered_types.add(t |
|
132 | self.registered_types.add(ntype) | |
115 | self.registered_senders.add(sender) |
|
133 | self.registered_senders.add(sender) | |
116 |
self.observers.setdefault((t |
|
134 | self.observers.setdefault((ntype,sender), set()).add(callback) | |
117 |
|
135 | |||
118 | def remove_all_observers(self): |
|
136 | def remove_all_observers(self): | |
119 | """Removes all observers from this notification center""" |
|
137 | """Removes all observers from this notification center""" | |
@@ -122,4 +140,4 b' class NotificationCenter(object):' | |||||
122 |
|
140 | |||
123 |
|
141 | |||
124 |
|
142 | |||
125 |
shared |
|
143 | shared_center = NotificationCenter() |
@@ -13,135 +13,129 b'' | |||||
13 | # Imports |
|
13 | # Imports | |
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 |
|
15 | |||
16 | # Tell nose to skip this module |
|
16 | import unittest | |
17 | __test__ = {} |
|
|||
18 |
|
17 | |||
19 | from twisted.trial import unittest |
|
18 | from IPython.utils.notification import ( | |
20 | import IPython.kernel.core.notification as notification |
|
19 | NotificationCenter, | |
|
20 | NotificationError, | |||
|
21 | shared_center | |||
|
22 | ) | |||
21 |
|
23 | |||
22 | #----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
23 | # Support Classes |
|
25 | # Support Classes | |
24 | #----------------------------------------------------------------------------- |
|
26 | #----------------------------------------------------------------------------- | |
25 |
|
27 | |||
|
28 | ||||
26 | class Observer(object): |
|
29 | class Observer(object): | |
27 | """docstring for Observer""" |
|
30 | ||
28 |
def __init__(self, expected |
|
31 | def __init__(self, expected_ntype, expected_sender, | |
29 |
center= |
|
32 | center=shared_center, *args, **kwargs): | |
30 | super(Observer, self).__init__() |
|
33 | super(Observer, self).__init__() | |
31 |
self.expected |
|
34 | self.expected_ntype = expected_ntype | |
32 |
self.expected |
|
35 | self.expected_sender = expected_sender | |
33 |
self.expected |
|
36 | self.expected_args = args | |
|
37 | self.expected_kwargs = kwargs | |||
34 | self.recieved = False |
|
38 | self.recieved = False | |
35 | center.add_observer(self.callback, |
|
39 | center.add_observer(self.callback, | |
36 |
self.expected |
|
40 | self.expected_ntype, | |
37 |
self.expected |
|
41 | self.expected_sender) | |
38 |
|
42 | |||
39 |
def callback(self, t |
|
43 | def callback(self, ntype, sender, *args, **kwargs): | |
40 | """callback""" |
|
44 | assert(ntype == self.expected_ntype or | |
41 |
|
45 | self.expected_ntype == None) | ||
42 |
assert( |
|
46 | assert(sender == self.expected_sender or | |
43 |
self.expected |
|
47 | self.expected_sender == None) | |
44 |
assert( |
|
48 | assert(args == self.expected_args) | |
45 | self.expectedSender == None) |
|
49 | assert(kwargs == self.expected_kwargs) | |
46 | assert(args == self.expectedKwArgs) |
|
|||
47 | self.recieved = True |
|
50 | self.recieved = True | |
48 |
|
51 | |||
49 | def verify(self): |
|
52 | def verify(self): | |
50 | """verify""" |
|
|||
51 |
|
||||
52 | assert(self.recieved) |
|
53 | assert(self.recieved) | |
53 |
|
54 | |||
54 | def reset(self): |
|
55 | def reset(self): | |
55 | """reset""" |
|
|||
56 |
|
||||
57 | self.recieved = False |
|
56 | self.recieved = False | |
58 |
|
57 | |||
59 |
|
58 | |||
60 | class Notifier(object): |
|
59 | class Notifier(object): | |
61 | """docstring for Notifier""" |
|
60 | ||
62 |
def __init__(self, t |
|
61 | def __init__(self, ntype, **kwargs): | |
63 | super(Notifier, self).__init__() |
|
62 | super(Notifier, self).__init__() | |
64 |
self.t |
|
63 | self.ntype = ntype | |
65 | self.kwargs = kwargs |
|
64 | self.kwargs = kwargs | |
66 |
|
65 | |||
67 |
def post(self, center= |
|
66 | def post(self, center=shared_center): | |
68 | """fire""" |
|
67 | ||
69 |
|
68 | center.post_notification(self.ntype, self, | ||
70 | center.post_notification(self.theType, self, |
|
|||
71 | **self.kwargs) |
|
69 | **self.kwargs) | |
72 |
|
70 | |||
|
71 | ||||
73 | #----------------------------------------------------------------------------- |
|
72 | #----------------------------------------------------------------------------- | |
74 | # Tests |
|
73 | # Tests | |
75 | #----------------------------------------------------------------------------- |
|
74 | #----------------------------------------------------------------------------- | |
76 |
|
75 | |||
|
76 | ||||
77 | class NotificationTests(unittest.TestCase): |
|
77 | class NotificationTests(unittest.TestCase): | |
78 | """docstring for NotificationTests""" |
|
78 | ||
79 |
|
||||
80 | def tearDown(self): |
|
79 | def tearDown(self): | |
81 |
|
|
80 | shared_center.remove_all_observers() | |
82 |
|
81 | |||
83 | def test_notification_delivered(self): |
|
82 | def test_notification_delivered(self): | |
84 | """Test that notifications are delivered""" |
|
83 | """Test that notifications are delivered""" | |
85 | expectedType = 'EXPECTED_TYPE' |
|
84 | ||
86 | sender = Notifier(expectedType) |
|
85 | expected_ntype = 'EXPECTED_TYPE' | |
87 |
|
|
86 | sender = Notifier(expected_ntype) | |
88 |
|
87 | observer = Observer(expected_ntype, sender) | ||
|
88 | ||||
89 | sender.post() |
|
89 | sender.post() | |
90 |
|
||||
91 | observer.verify() |
|
90 | observer.verify() | |
92 |
|
91 | |||
93 | def test_type_specificity(self): |
|
92 | def test_type_specificity(self): | |
94 | """Test that observers are registered by type""" |
|
93 | """Test that observers are registered by type""" | |
95 |
|
94 | |||
96 |
expected |
|
95 | expected_ntype = 1 | |
97 |
unexpected |
|
96 | unexpected_ntype = "UNEXPECTED_TYPE" | |
98 |
sender = Notifier(expected |
|
97 | sender = Notifier(expected_ntype) | |
99 |
unexpected |
|
98 | unexpected_sender = Notifier(unexpected_ntype) | |
100 |
observer = Observer(expected |
|
99 | observer = Observer(expected_ntype, sender) | |
101 |
|
100 | |||
102 | sender.post() |
|
101 | sender.post() | |
103 |
unexpected |
|
102 | unexpected_sender.post() | |
104 |
|
||||
105 | observer.verify() |
|
103 | observer.verify() | |
106 |
|
104 | |||
107 | def test_sender_specificity(self): |
|
105 | def test_sender_specificity(self): | |
108 | """Test that observers are registered by sender""" |
|
106 | """Test that observers are registered by sender""" | |
109 |
|
107 | |||
110 |
expected |
|
108 | expected_ntype = "EXPECTED_TYPE" | |
111 |
sender1 = Notifier(expected |
|
109 | sender1 = Notifier(expected_ntype) | |
112 |
sender2 = Notifier(expected |
|
110 | sender2 = Notifier(expected_ntype) | |
113 |
observer = Observer(expected |
|
111 | observer = Observer(expected_ntype, sender1) | |
114 |
|
112 | |||
115 | sender1.post() |
|
113 | sender1.post() | |
116 | sender2.post() |
|
114 | sender2.post() | |
117 |
|
115 | |||
118 | observer.verify() |
|
116 | observer.verify() | |
119 |
|
117 | |||
120 | def test_remove_all_observers(self): |
|
118 | def test_remove_all_observers(self): | |
121 | """White-box test for remove_all_observers""" |
|
119 | """White-box test for remove_all_observers""" | |
122 |
|
120 | |||
123 | for i in xrange(10): |
|
121 | for i in xrange(10): | |
124 |
Observer('TYPE', None, center= |
|
122 | Observer('TYPE', None, center=shared_center) | |
125 |
|
123 | |||
126 |
self.assert_(len( |
|
124 | self.assert_(len(shared_center.observers[('TYPE',None)]) >= 10, | |
127 | "observers registered") |
|
125 | "observers registered") | |
128 |
|
126 | |||
129 |
|
|
127 | shared_center.remove_all_observers() | |
130 |
|
128 | self.assert_(len(shared_center.observers) == 0, "observers removed") | ||
131 | self.assert_(len(notification.sharedCenter.observers) == 0, "observers removed") |
|
|||
132 |
|
129 | |||
133 | def test_any_sender(self): |
|
130 | def test_any_sender(self): | |
134 | """test_any_sender""" |
|
131 | expected_ntype = "EXPECTED_TYPE" | |
135 |
|
132 | sender1 = Notifier(expected_ntype) | ||
136 | expectedType = "EXPECTED_TYPE" |
|
133 | sender2 = Notifier(expected_ntype) | |
137 |
|
|
134 | observer = Observer(expected_ntype, None) | |
138 | sender2 = Notifier(expectedType) |
|
135 | ||
139 | observer = Observer(expectedType, None) |
|
|||
140 |
|
||||
141 |
|
||||
142 | sender1.post() |
|
136 | sender1.post() | |
143 | observer.verify() |
|
137 | observer.verify() | |
144 |
|
138 | |||
145 | observer.reset() |
|
139 | observer.reset() | |
146 | sender2.post() |
|
140 | sender2.post() | |
147 | observer.verify() |
|
141 | observer.verify() | |
@@ -152,10 +146,9 b' class NotificationTests(unittest.TestCase):' | |||||
152 |
|
146 | |||
153 | for i in xrange(10): |
|
147 | for i in xrange(10): | |
154 | Observer("UNRELATED_TYPE", None) |
|
148 | Observer("UNRELATED_TYPE", None) | |
155 |
|
149 | |||
156 | o = Observer('EXPECTED_TYPE', None) |
|
150 | o = Observer('EXPECTED_TYPE', None) | |
157 |
|
151 | shared_center.post_notification('EXPECTED_TYPE', self) | ||
158 | notification.sharedCenter.post_notification('EXPECTED_TYPE', self) |
|
|||
159 |
|
||||
160 | o.verify() |
|
152 | o.verify() | |
161 |
|
153 | |||
|
154 |
@@ -151,6 +151,26 b' class _SimpleTest:' | |||||
151 | return self.__repr__() |
|
151 | return self.__repr__() | |
152 |
|
152 | |||
153 |
|
153 | |||
|
154 | def getmembers(object, predicate=None): | |||
|
155 | """A safe version of inspect.getmembers that handles missing attributes. | |||
|
156 | ||||
|
157 | This is useful when there are descriptor based attributes that for | |||
|
158 | some reason raise AttributeError even though they exist. This happens | |||
|
159 | in zope.inteface with the __provides__ attribute. | |||
|
160 | """ | |||
|
161 | results = [] | |||
|
162 | for key in dir(object): | |||
|
163 | try: | |||
|
164 | value = getattr(object, key) | |||
|
165 | except AttributeError: | |||
|
166 | pass | |||
|
167 | else: | |||
|
168 | if not predicate or predicate(value): | |||
|
169 | results.append((key, value)) | |||
|
170 | results.sort() | |||
|
171 | return results | |||
|
172 | ||||
|
173 | ||||
154 | #----------------------------------------------------------------------------- |
|
174 | #----------------------------------------------------------------------------- | |
155 | # Base TraitletType for all traitlets |
|
175 | # Base TraitletType for all traitlets | |
156 | #----------------------------------------------------------------------------- |
|
176 | #----------------------------------------------------------------------------- | |
@@ -316,6 +336,9 b' class MetaHasTraitlets(type):' | |||||
316 | This instantiates all TraitletTypes in the class dict and sets their |
|
336 | This instantiates all TraitletTypes in the class dict and sets their | |
317 | :attr:`name` attribute. |
|
337 | :attr:`name` attribute. | |
318 | """ |
|
338 | """ | |
|
339 | # print "MetaHasTraitlets (mcls, name): ", mcls, name | |||
|
340 | # print "MetaHasTraitlets (bases): ", bases | |||
|
341 | # print "MetaHasTraitlets (classdict): ", classdict | |||
319 | for k,v in classdict.iteritems(): |
|
342 | for k,v in classdict.iteritems(): | |
320 | if isinstance(v, TraitletType): |
|
343 | if isinstance(v, TraitletType): | |
321 | v.name = k |
|
344 | v.name = k | |
@@ -354,9 +377,16 b' class HasTraitlets(object):' | |||||
354 | # Here we tell all the TraitletType instances to set their default |
|
377 | # Here we tell all the TraitletType instances to set their default | |
355 | # values on the instance. |
|
378 | # values on the instance. | |
356 | for key in dir(cls): |
|
379 | for key in dir(cls): | |
357 | value = getattr(cls, key) |
|
380 | # Some descriptors raise AttributeError like zope.interface's | |
358 | if isinstance(value, TraitletType): |
|
381 | # __provides__ attributes even though they exist. This causes | |
359 | value.instance_init(inst) |
|
382 | # AttributeErrors even though they are listed in dir(cls). | |
|
383 | try: | |||
|
384 | value = getattr(cls, key) | |||
|
385 | except AttributeError: | |||
|
386 | pass | |||
|
387 | else: | |||
|
388 | if isinstance(value, TraitletType): | |||
|
389 | value.instance_init(inst) | |||
360 | return inst |
|
390 | return inst | |
361 |
|
391 | |||
362 | # def __init__(self): |
|
392 | # def __init__(self): | |
@@ -475,7 +505,7 b' class HasTraitlets(object):' | |||||
475 | exists, but has any value. This is because get_metadata returns |
|
505 | exists, but has any value. This is because get_metadata returns | |
476 | None if a metadata key doesn't exist. |
|
506 | None if a metadata key doesn't exist. | |
477 | """ |
|
507 | """ | |
478 |
traitlets = dict([memb for memb in |
|
508 | traitlets = dict([memb for memb in getmembers(self.__class__) if \ | |
479 | isinstance(memb[1], TraitletType)]) |
|
509 | isinstance(memb[1], TraitletType)]) | |
480 |
|
510 | |||
481 | if len(metadata) == 0: |
|
511 | if len(metadata) == 0: |
@@ -14,6 +14,8 b' ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SRCDIR)' | |||||
14 |
|
14 | |||
15 | .PHONY: help clean html web pickle htmlhelp latex changes linkcheck api |
|
15 | .PHONY: help clean html web pickle htmlhelp latex changes linkcheck api | |
16 |
|
16 | |||
|
17 | default: html | |||
|
18 | ||||
17 | help: |
|
19 | help: | |
18 | @echo "Please use \`make <target>' where <target> is one of" |
|
20 | @echo "Please use \`make <target>' where <target> is one of" | |
19 | @echo " html to make standalone HTML files" |
|
21 | @echo " html to make standalone HTML files" |
@@ -34,6 +34,7 b" if __name__ == '__main__':" | |||||
34 | r'\.ipdoctest', |
|
34 | r'\.ipdoctest', | |
35 | r'\.Gnuplot', |
|
35 | r'\.Gnuplot', | |
36 | r'\.frontend\.process\.winprocess', |
|
36 | r'\.frontend\.process\.winprocess', | |
|
37 | r'\.Shell', | |||
37 | ] |
|
38 | ] | |
38 | docwriter.write_api_docs(outdir) |
|
39 | docwriter.write_api_docs(outdir) | |
39 | docwriter.write_index(outdir, 'gen', |
|
40 | docwriter.write_index(outdir, 'gen', |
@@ -1,71 +1,71 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
|||
3 | """Run a Monte-Carlo options pricer in parallel.""" |
|
2 | """Run a Monte-Carlo options pricer in parallel.""" | |
4 |
|
3 | |||
5 | from IPython.kernel import client |
|
4 | from IPython.kernel import client | |
6 |
import numpy as |
|
5 | import numpy as np | |
7 |
from mcpricer import |
|
6 | from mcpricer import price_options | |
8 |
|
7 | |||
|
8 | # The MultiEngineClient is used to setup the calculation and works with all | |||
|
9 | # engine. | |||
|
10 | mec = client.MultiEngineClient(profile='mycluster') | |||
9 |
|
11 | |||
10 | tc = client.TaskClient() |
|
12 | # The TaskClient is an interface to the engines that provides dynamic load | |
11 | rc = client.MultiEngineClient() |
|
13 | # balancing at the expense of not knowing which engine will execute the code. | |
|
14 | tc = client.TaskClient(profile='mycluster') | |||
12 |
|
15 | |||
13 | # Initialize the common code on the engines |
|
16 | # Initialize the common code on the engines. This Python module has the | |
14 | rc.run('mcpricer.py') |
|
17 | # price_options function that prices the options. | |
|
18 | mec.run('mcpricer.py') | |||
15 |
|
19 | |||
16 | # Push the variables that won't change |
|
20 | # Define the function that will make up our tasks. We basically want to | |
17 | #(stock print, interest rate, days and MC paths) |
|
21 | # call the price_options function with all but two arguments (K, sigma) | |
18 | rc.push(dict(S=100.0, r=0.05, days=260, paths=10000)) |
|
22 | # fixed. | |
19 |
|
23 | def my_prices(K, sigma): | ||
20 | task_string = """\ |
|
24 | S = 100.0 | |
21 | op = MCOptionPricer(S,K,sigma,r,days,paths) |
|
25 | r = 0.05 | |
22 | op.run() |
|
26 | days = 260 | |
23 | vp, ap, vc, ac = op.vanilla_put, op.asian_put, op.vanilla_call, op.asian_call |
|
27 | paths = 100000 | |
24 | """ |
|
28 | return price_options(S, K, sigma, r, days, paths) | |
25 |
|
29 | |||
26 | # Create arrays of strike prices and volatilities |
|
30 | # Create arrays of strike prices and volatilities | |
27 | K_vals = N.linspace(90.0,100.0,5) |
|
31 | nK = 10 | |
28 | sigma_vals = N.linspace(0.0, 0.2,5) |
|
32 | nsigma = 10 | |
|
33 | K_vals = np.linspace(90.0, 100.0, nK) | |||
|
34 | sigma_vals = np.linspace(0.1, 0.4, nsigma) | |||
29 |
|
35 | |||
30 | # Submit tasks |
|
36 | # Submit tasks to the TaskClient for each (K, sigma) pair as a MapTask. | |
|
37 | # The MapTask simply applies a function (my_prices) to the arguments: | |||
|
38 | # my_prices(K, sigma) and returns the result. | |||
31 | taskids = [] |
|
39 | taskids = [] | |
32 | for K in K_vals: |
|
40 | for K in K_vals: | |
33 | for sigma in sigma_vals: |
|
41 | for sigma in sigma_vals: | |
34 | t = client.StringTask(task_string, |
|
42 | t = client.MapTask(my_prices, args=(K, sigma)) | |
35 | push=dict(sigma=sigma,K=K), |
|
|||
36 | pull=('vp','ap','vc','ac','sigma','K')) |
|
|||
37 | taskids.append(tc.run(t)) |
|
43 | taskids.append(tc.run(t)) | |
38 |
|
44 | |||
39 | print "Submitted tasks: ", taskids |
|
45 | print "Submitted tasks: ", len(taskids) | |
40 |
|
46 | |||
41 | # Block until tasks are completed |
|
47 | # Block until all tasks are completed. | |
42 | tc.barrier(taskids) |
|
48 | tc.barrier(taskids) | |
43 |
|
49 | |||
44 | # Get the results |
|
50 | # Get the results using TaskClient.get_task_result. | |
45 | results = [tc.get_task_result(tid) for tid in taskids] |
|
51 | results = [tc.get_task_result(tid) for tid in taskids] | |
46 |
|
52 | |||
47 | # Assemble the result |
|
53 | # Assemble the result into a structured NumPy array. | |
48 | vc = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64') |
|
54 | prices = np.empty(nK*nsigma, | |
49 | vp = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64') |
|
55 | dtype=[('ecall',float),('eput',float),('acall',float),('aput',float)] | |
50 | ac = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64') |
|
56 | ) | |
51 | ap = N.empty(K_vals.shape[0]*sigma_vals.shape[0],dtype='float64') |
|
57 | for i, price_tuple in enumerate(results): | |
52 | for i, tr in enumerate(results): |
|
58 | prices[i] = price_tuple | |
53 | ns = tr.ns |
|
59 | prices.shape = (nK, nsigma) | |
54 | vc[i] = ns.vc |
|
60 | K_vals, sigma_vals = np.meshgrid(K_vals, sigma_vals) | |
55 | vp[i] = ns.vp |
|
|||
56 | ac[i] = ns.ac |
|
|||
57 | ap[i] = ns.ap |
|
|||
58 | vc.shape = (K_vals.shape[0],sigma_vals.shape[0]) |
|
|||
59 | vp.shape = (K_vals.shape[0],sigma_vals.shape[0]) |
|
|||
60 | ac.shape = (K_vals.shape[0],sigma_vals.shape[0]) |
|
|||
61 | ap.shape = (K_vals.shape[0],sigma_vals.shape[0]) |
|
|||
62 |
|
||||
63 |
|
61 | |||
64 |
def plot_options( |
|
62 | def plot_options(sigma_vals, K_vals, prices): | |
65 | """Make a contour plot of the option prices.""" |
|
63 | """ | |
66 | import pylab |
|
64 | Make a contour plot of the option price in (sigma, K) space. | |
67 | pylab.contourf(sigma_vals, K_vals, prices) |
|
65 | """ | |
68 | pylab.colorbar() |
|
66 | from matplotlib import pyplot as plt | |
69 | pylab.title("Option Price") |
|
67 | plt.contourf(sigma_vals, K_vals, prices) | |
70 | pylab.xlabel("Volatility") |
|
68 | plt.colorbar() | |
71 |
p |
|
69 | plt.title("Option Price") | |
|
70 | plt.xlabel("Volatility") | |||
|
71 | plt.ylabel("Strike Price") |
@@ -1,43 +1,45 b'' | |||||
1 |
import numpy as |
|
1 | import numpy as np | |
2 | from math import * |
|
2 | from math import * | |
3 |
|
3 | |||
4 | class MCOptionPricer(object): |
|
|||
5 | def __init__(self, S=100.0, K=100.0, sigma=0.25, r=0.05, days=260, paths=10000): |
|
|||
6 | self.S = S |
|
|||
7 | self.K = K |
|
|||
8 | self.sigma = sigma |
|
|||
9 | self.r = r |
|
|||
10 | self.days = days |
|
|||
11 | self.paths = paths |
|
|||
12 | self.h = 1.0/self.days |
|
|||
13 | self.const1 = exp((self.r-0.5*self.sigma**2)*self.h) |
|
|||
14 | self.const2 = self.sigma*sqrt(self.h) |
|
|||
15 |
|
||||
16 | def run(self): |
|
|||
17 | stock_price = self.S*N.ones(self.paths, dtype='float64') |
|
|||
18 | stock_price_sum = N.zeros(self.paths, dtype='float64') |
|
|||
19 | for j in range(self.days): |
|
|||
20 | growth_factor = self.const1*N.exp(self.const2*N.random.standard_normal(self.paths)) |
|
|||
21 | stock_price = stock_price*growth_factor |
|
|||
22 | stock_price_sum = stock_price_sum + stock_price |
|
|||
23 | stock_price_avg = stock_price_sum/self.days |
|
|||
24 | zeros = N.zeros(self.paths, dtype='float64') |
|
|||
25 | r_factor = exp(-self.r*self.h*self.days) |
|
|||
26 | self.vanilla_put = r_factor*N.mean(N.maximum(zeros,self.K-stock_price)) |
|
|||
27 | self.asian_put = r_factor*N.mean(N.maximum(zeros,self.K-stock_price_avg)) |
|
|||
28 | self.vanilla_call = r_factor*N.mean(N.maximum(zeros,stock_price-self.K)) |
|
|||
29 | self.asian_call = r_factor*N.mean(N.maximum(zeros,stock_price_avg-self.K)) |
|
|||
30 |
|
4 | |||
|
5 | def price_options(S=100.0, K=100.0, sigma=0.25, r=0.05, days=260, paths=10000): | |||
|
6 | """ | |||
|
7 | Price European and Asian options using a Monte Carlo method. | |||
31 |
|
|
8 | ||
32 | def main(): |
|
9 | Parameters | |
33 | op = MCOptionPricer() |
|
10 | ---------- | |
34 | op.run() |
|
11 | S : float | |
35 | print "Vanilla Put Price = ", op.vanilla_put |
|
12 | The initial price of the stock. | |
36 | print "Asian Put Price = ", op.asian_put |
|
13 | K : float | |
37 | print "Vanilla Call Price = ", op.vanilla_call |
|
14 | The strike price of the option. | |
38 | print "Asian Call Price = ", op.asian_call |
|
15 | sigma : float | |
|
16 | The volatility of the stock. | |||
|
17 | r : float | |||
|
18 | The risk free interest rate. | |||
|
19 | days : int | |||
|
20 | The number of days until the option expires. | |||
|
21 | paths : int | |||
|
22 | The number of Monte Carlo paths used to price the option. | |||
39 |
|
|
23 | ||
40 |
|
24 | Returns | ||
41 | if __name__ == '__main__': |
|
25 | ------- | |
42 | main() |
|
26 | A tuple of (E. call, E. put, A. call, A. put) option prices. | |
|
27 | """ | |||
|
28 | h = 1.0/days | |||
|
29 | const1 = exp((r-0.5*sigma**2)*h) | |||
|
30 | const2 = sigma*sqrt(h) | |||
|
31 | stock_price = S*np.ones(paths, dtype='float64') | |||
|
32 | stock_price_sum = np.zeros(paths, dtype='float64') | |||
|
33 | for j in range(days): | |||
|
34 | growth_factor = const1*np.exp(const2*np.random.standard_normal(paths)) | |||
|
35 | stock_price = stock_price*growth_factor | |||
|
36 | stock_price_sum = stock_price_sum + stock_price | |||
|
37 | stock_price_avg = stock_price_sum/days | |||
|
38 | zeros = np.zeros(paths, dtype='float64') | |||
|
39 | r_factor = exp(-r*h*days) | |||
|
40 | euro_put = r_factor*np.mean(np.maximum(zeros, K-stock_price)) | |||
|
41 | asian_put = r_factor*np.mean(np.maximum(zeros, K-stock_price_avg)) | |||
|
42 | euro_call = r_factor*np.mean(np.maximum(zeros, stock_price-K)) | |||
|
43 | asian_call = r_factor*np.mean(np.maximum(zeros, stock_price_avg-K)) | |||
|
44 | return (euro_call, euro_put, asian_call, asian_put) | |||
43 |
|
45 |
@@ -1,13 +1,20 b'' | |||||
1 | """Count the frequencies of words in a string""" |
|
1 | """Count the frequencies of words in a string""" | |
2 |
|
2 | |||
|
3 | from __future__ import division | |||
|
4 | ||||
|
5 | import cmath as math | |||
|
6 | ||||
|
7 | ||||
3 | def wordfreq(text): |
|
8 | def wordfreq(text): | |
4 | """Return a dictionary of words and word counts in a string.""" |
|
9 | """Return a dictionary of words and word counts in a string.""" | |
5 |
|
10 | |||
6 | freqs = {} |
|
11 | freqs = {} | |
7 | for word in text.split(): |
|
12 | for word in text.split(): | |
8 | freqs[word] = freqs.get(word, 0) + 1 |
|
13 | lword = word.lower() | |
|
14 | freqs[lword] = freqs.get(lword, 0) + 1 | |||
9 | return freqs |
|
15 | return freqs | |
10 |
|
16 | |||
|
17 | ||||
11 | def print_wordfreq(freqs, n=10): |
|
18 | def print_wordfreq(freqs, n=10): | |
12 | """Print the n most common words and counts in the freqs dict.""" |
|
19 | """Print the n most common words and counts in the freqs dict.""" | |
13 |
|
20 | |||
@@ -17,7 +24,43 b' def print_wordfreq(freqs, n=10):' | |||||
17 | for (count, word) in items[:n]: |
|
24 | for (count, word) in items[:n]: | |
18 | print word, count |
|
25 | print word, count | |
19 |
|
26 | |||
20 | if __name__ == '__main__': |
|
27 | ||
21 | import gzip |
|
28 | def wordfreq_to_weightsize(worddict, minsize=25, maxsize=50, minalpha=0.5, maxalpha=1.0): | |
22 | text = gzip.open('HISTORY.gz').read() |
|
29 | mincount = min(worddict.itervalues()) | |
23 | freqs = wordfreq(text) No newline at end of file |
|
30 | maxcount = max(worddict.itervalues()) | |
|
31 | weights = {} | |||
|
32 | for k, v in worddict.iteritems(): | |||
|
33 | w = (v-mincount)/(maxcount-mincount) | |||
|
34 | alpha = minalpha + (maxalpha-minalpha)*w | |||
|
35 | size = minsize + (maxsize-minsize)*w | |||
|
36 | weights[k] = (alpha, size) | |||
|
37 | return weights | |||
|
38 | ||||
|
39 | ||||
|
40 | def tagcloud(worddict, n=10, minsize=25, maxsize=50, minalpha=0.5, maxalpha=1.0): | |||
|
41 | from matplotlib import pyplot as plt | |||
|
42 | import random | |||
|
43 | ||||
|
44 | worddict = wordfreq_to_weightsize(worddict, minsize, maxsize, minalpha, maxalpha) | |||
|
45 | ||||
|
46 | fig = plt.figure() | |||
|
47 | ax = fig.add_subplot(111) | |||
|
48 | ax.set_position([0.0,0.0,1.0,1.0]) | |||
|
49 | plt.xticks([]) | |||
|
50 | plt.yticks([]) | |||
|
51 | ||||
|
52 | words = worddict.keys() | |||
|
53 | alphas = [v[0] for v in worddict.values()] | |||
|
54 | sizes = [v[1] for v in worddict.values()] | |||
|
55 | items = zip(alphas, sizes, words) | |||
|
56 | items.sort(reverse=True) | |||
|
57 | for alpha, size, word in items[:n]: | |||
|
58 | # xpos = random.normalvariate(0.5, 0.3) | |||
|
59 | # ypos = random.normalvariate(0.5, 0.3) | |||
|
60 | xpos = random.uniform(0.0,1.0) | |||
|
61 | ypos = random.uniform(0.0,1.0) | |||
|
62 | ax.text(xpos, ypos, word.lower(), alpha=alpha, fontsize=size) | |||
|
63 | ax.autoscale_view() | |||
|
64 | return ax | |||
|
65 | ||||
|
66 | No newline at end of file |
@@ -11,7 +11,7 b'' | |||||
11 | ipcluster is a control tool for IPython's parallel computing functions. |
|
11 | ipcluster is a control tool for IPython's parallel computing functions. | |
12 |
|
12 | |||
13 | IPython cluster startup. This starts a controller and engines using various |
|
13 | IPython cluster startup. This starts a controller and engines using various | |
14 | approaches. Use the IPYTHONDIR environment variable to change your IPython |
|
14 | approaches. Use the IPYTHON_DIR environment variable to change your IPython | |
15 | directory from the default of .ipython or _ipython. The log and security |
|
15 | directory from the default of .ipython or _ipython. The log and security | |
16 | subdirectories of your IPython directory will be used by this script for log |
|
16 | subdirectories of your IPython directory will be used by this script for log | |
17 | files and security files. |
|
17 | files and security files. |
@@ -141,8 +141,8 b' may want to use a small, lightweight editor here (in case your default' | |||||
141 | EDITOR is something like Emacs). |
|
141 | EDITOR is something like Emacs). | |
142 | .TP |
|
142 | .TP | |
143 | .B \-ipythondir <name> |
|
143 | .B \-ipythondir <name> | |
144 | The name of your IPython configuration directory IPYTHONDIR. This can |
|
144 | The name of your IPython configuration directory IPYTHON_DIR. This can | |
145 | also be specified through the environment variable IPYTHONDIR. |
|
145 | also be specified through the environment variable IPYTHON_DIR. | |
146 | .TP |
|
146 | .TP | |
147 | .B \-log|l |
|
147 | .B \-log|l | |
148 | Generate a log file of all input. The file is named ipython_log.py in your |
|
148 | Generate a log file of all input. The file is named ipython_log.py in your | |
@@ -197,10 +197,10 b' your config file (default off).' | |||||
197 | .TP |
|
197 | .TP | |
198 | .B \-profile|p <name> |
|
198 | .B \-profile|p <name> | |
199 | Assume that your config file is ipythonrc-<name> (looks in current dir |
|
199 | Assume that your config file is ipythonrc-<name> (looks in current dir | |
200 | first, then in IPYTHONDIR). This is a quick way to keep and load |
|
200 | first, then in IPYTHON_DIR). This is a quick way to keep and load | |
201 | multiple config files for different tasks, especially if you use the |
|
201 | multiple config files for different tasks, especially if you use the | |
202 | include option of config files. You can keep a basic |
|
202 | include option of config files. You can keep a basic | |
203 | IPYTHONDIR/ipythonrc file and then have other 'profiles' which include |
|
203 | IPYTHON_DIR/ipythonrc file and then have other 'profiles' which include | |
204 | this one and load extra things for particular tasks. For example: |
|
204 | this one and load extra things for particular tasks. For example: | |
205 | .br |
|
205 | .br | |
206 | .sp 1 |
|
206 | .sp 1 | |
@@ -244,7 +244,7 b' Start in bare bones mode (no config file loaded).' | |||||
244 | .TP |
|
244 | .TP | |
245 | .B \-rcfile <name> |
|
245 | .B \-rcfile <name> | |
246 | Name of your IPython resource configuration file. normally IPython |
|
246 | Name of your IPython resource configuration file. normally IPython | |
247 | loads ipythonrc (from current directory) or IPYTHONDIR/ipythonrc. If |
|
247 | loads ipythonrc (from current directory) or IPYTHON_DIR/ipythonrc. If | |
248 | the loading of your config file fails, IPython starts with a bare |
|
248 | the loading of your config file fails, IPython starts with a bare | |
249 | bones configuration (no modules loaded at all). |
|
249 | bones configuration (no modules loaded at all). | |
250 | .TP |
|
250 | .TP | |
@@ -286,7 +286,7 b" Shorthand for '\\-separate_in 0 \\-separate_out 0 \\-separate_out2 0'." | |||||
286 | Simply removes all input/output separators. |
|
286 | Simply removes all input/output separators. | |
287 | .TP |
|
287 | .TP | |
288 | .B \-upgrade |
|
288 | .B \-upgrade | |
289 | Allows you to upgrade your IPYTHONDIR configuration when you install a |
|
289 | Allows you to upgrade your IPYTHON_DIR configuration when you install a | |
290 | new version of IPython. Since new versions may include new command |
|
290 | new version of IPython. Since new versions may include new command | |
291 | lines options or example files, this copies updated ipythonrc-type |
|
291 | lines options or example files, this copies updated ipythonrc-type | |
292 | files. However, it backs up (with a .old extension) all files which |
|
292 | files. However, it backs up (with a .old extension) all files which |
@@ -162,10 +162,13 b" latex_font_size = '11pt'" | |||||
162 | # Grouping the document tree into LaTeX files. List of tuples |
|
162 | # Grouping the document tree into LaTeX files. List of tuples | |
163 | # (source start file, target name, title, author, document class [howto/manual]). |
|
163 | # (source start file, target name, title, author, document class [howto/manual]). | |
164 |
|
164 | |||
165 | latex_documents = [ ('index', 'ipython.tex', 'IPython Documentation', |
|
165 | latex_documents = [ | |
166 | ur"""The IPython Development Team""", |
|
166 | ('index', 'ipython.tex', 'IPython Documentation', | |
167 | 'manual', True), |
|
167 | ur"""The IPython Development Team""", 'manual', True), | |
168 | ] |
|
168 | ('parallel/winhpc_index', 'winhpc_whitepaper.tex', | |
|
169 | 'Using IPython on Windows HPC Server 2008', | |||
|
170 | ur"Brian E. Granger", 'manual', True) | |||
|
171 | ] | |||
169 |
|
172 | |||
170 | # The name of an image file (relative to this directory) to place at the top of |
|
173 | # The name of an image file (relative to this directory) to place at the top of | |
171 | # the title page. |
|
174 | # the title page. | |
@@ -182,7 +185,7 b" latex_documents = [ ('index', 'ipython.tex', 'IPython Documentation'," | |||||
182 | #latex_appendices = [] |
|
185 | #latex_appendices = [] | |
183 |
|
186 | |||
184 | # If false, no module index is generated. |
|
187 | # If false, no module index is generated. | |
185 |
|
|
188 | latex_use_modindex = True | |
186 |
|
189 | |||
187 |
|
190 | |||
188 | # Cleanup |
|
191 | # Cleanup |
@@ -243,15 +243,15 b' So where should you put your configuration files? By default, all IPython' | |||||
243 | applications look in the so called "IPython directory". The location of |
|
243 | applications look in the so called "IPython directory". The location of | |
244 | this directory is determined by the following algorithm: |
|
244 | this directory is determined by the following algorithm: | |
245 |
|
245 | |||
246 | * If the ``-ipythondir`` command line flag is given, its value is used. |
|
246 | * If the ``--ipython-dir`` command line flag is given, its value is used. | |
247 |
|
247 | |||
248 | * If not, the value returned by :func:`IPython.utils.genutils.get_ipython_dir` |
|
248 | * If not, the value returned by :func:`IPython.utils.genutils.get_ipython_dir` | |
249 | is used. This function will first look at the :envvar:`IPYTHONDIR` |
|
249 | is used. This function will first look at the :envvar:`IPYTHON_DIR` | |
250 | environment variable and then default to the directory |
|
250 | environment variable and then default to the directory | |
251 |
:file:`$HOME/.ipython |
|
251 | :file:`$HOME/.ipython`. | |
252 |
|
252 | |||
253 | For most users, the default value will simply be something like |
|
253 | For most users, the default value will simply be something like | |
254 |
:file:`$HOME/.ipython |
|
254 | :file:`$HOME/.ipython`. | |
255 |
|
255 | |||
256 | Once the location of the IPython directory has been determined, you need to |
|
256 | Once the location of the IPython directory has been determined, you need to | |
257 | know what filename to use for the configuration file. The basic idea is that |
|
257 | know what filename to use for the configuration file. The basic idea is that |
@@ -15,4 +15,5 b" IPython developer's guide" | |||||
15 | roadmap.txt |
|
15 | roadmap.txt | |
16 | reorg.txt |
|
16 | reorg.txt | |
17 | notification_blueprint.txt |
|
17 | notification_blueprint.txt | |
|
18 | ipgraph.txt | |||
18 |
|
19 |
@@ -31,7 +31,7 b' your ipythonrc configuration file for details on those. This file' | |||||
31 | typically installed in the $HOME/.ipython directory. For Windows users, |
|
31 | typically installed in the $HOME/.ipython directory. For Windows users, | |
32 | $HOME resolves to C:\\Documents and Settings\\YourUserName in most |
|
32 | $HOME resolves to C:\\Documents and Settings\\YourUserName in most | |
33 | instances. In the rest of this text, we will refer to this directory as |
|
33 | instances. In the rest of this text, we will refer to this directory as | |
34 | IPYTHONDIR. |
|
34 | IPYTHON_DIR. | |
35 |
|
35 | |||
36 |
|
36 | |||
37 |
|
37 | |||
@@ -150,9 +150,9 b' All options with a [no] prepended can be specified in negated form' | |||||
150 | something like Emacs). |
|
150 | something like Emacs). | |
151 |
|
151 | |||
152 | -ipythondir <name> |
|
152 | -ipythondir <name> | |
153 | name of your IPython configuration directory IPYTHONDIR. This |
|
153 | name of your IPython configuration directory IPYTHON_DIR. This | |
154 | can also be specified through the environment variable |
|
154 | can also be specified through the environment variable | |
155 | IPYTHONDIR. |
|
155 | IPYTHON_DIR. | |
156 |
|
156 | |||
157 | -log, l |
|
157 | -log, l | |
158 | generate a log file of all input. The file is named |
|
158 | generate a log file of all input. The file is named | |
@@ -211,10 +211,10 b' All options with a [no] prepended can be specified in negated form' | |||||
211 |
|
211 | |||
212 | assume that your config file is ipythonrc-<name> or |
|
212 | assume that your config file is ipythonrc-<name> or | |
213 | ipy_profile_<name>.py (looks in current dir first, then in |
|
213 | ipy_profile_<name>.py (looks in current dir first, then in | |
214 | IPYTHONDIR). This is a quick way to keep and load multiple |
|
214 | IPYTHON_DIR). This is a quick way to keep and load multiple | |
215 | config files for different tasks, especially if you use the |
|
215 | config files for different tasks, especially if you use the | |
216 | include option of config files. You can keep a basic |
|
216 | include option of config files. You can keep a basic | |
217 | IPYTHONDIR/ipythonrc file and then have other 'profiles' which |
|
217 | IPYTHON_DIR/ipythonrc file and then have other 'profiles' which | |
218 | include this one and load extra things for particular |
|
218 | include this one and load extra things for particular | |
219 | tasks. For example: |
|
219 | tasks. For example: | |
220 |
|
220 | |||
@@ -252,7 +252,7 b' All options with a [no] prepended can be specified in negated form' | |||||
252 | -rcfile <name> |
|
252 | -rcfile <name> | |
253 | name of your IPython resource configuration file. Normally |
|
253 | name of your IPython resource configuration file. Normally | |
254 | IPython loads ipythonrc (from current directory) or |
|
254 | IPython loads ipythonrc (from current directory) or | |
255 | IPYTHONDIR/ipythonrc. |
|
255 | IPYTHON_DIR/ipythonrc. | |
256 |
|
256 | |||
257 | If the loading of your config file fails, IPython starts with |
|
257 | If the loading of your config file fails, IPython starts with | |
258 | a bare bones configuration (no modules loaded at all). |
|
258 | a bare bones configuration (no modules loaded at all). | |
@@ -299,7 +299,7 b' All options with a [no] prepended can be specified in negated form' | |||||
299 | 0'. Simply removes all input/output separators. |
|
299 | 0'. Simply removes all input/output separators. | |
300 |
|
300 | |||
301 | -upgrade |
|
301 | -upgrade | |
302 | allows you to upgrade your IPYTHONDIR configuration when you |
|
302 | allows you to upgrade your IPYTHON_DIR configuration when you | |
303 | install a new version of IPython. Since new versions may |
|
303 | install a new version of IPython. Since new versions may | |
304 | include new command line options or example files, this copies |
|
304 | include new command line options or example files, this copies | |
305 | updated ipythonrc-type files. However, it backs up (with a |
|
305 | updated ipythonrc-type files. However, it backs up (with a | |
@@ -542,7 +542,7 b' Persistent command history across sessions' | |||||
542 |
|
542 | |||
543 | IPython will save your input history when it leaves and reload it next |
|
543 | IPython will save your input history when it leaves and reload it next | |
544 | time you restart it. By default, the history file is named |
|
544 | time you restart it. By default, the history file is named | |
545 | $IPYTHONDIR/history, but if you've loaded a named profile, |
|
545 | $IPYTHON_DIR/history, but if you've loaded a named profile, | |
546 | '-PROFILE_NAME' is appended to the name. This allows you to keep |
|
546 | '-PROFILE_NAME' is appended to the name. This allows you to keep | |
547 | separate histories related to various tasks: commands related to |
|
547 | separate histories related to various tasks: commands related to | |
548 | numerical work will not be clobbered by a system shell history, for |
|
548 | numerical work will not be clobbered by a system shell history, for | |
@@ -636,7 +636,7 b' follows:' | |||||
636 | %logstart [log_name [log_mode]] |
|
636 | %logstart [log_name [log_mode]] | |
637 |
|
637 | |||
638 | If no name is given, it defaults to a file named 'log' in your |
|
638 | If no name is given, it defaults to a file named 'log' in your | |
639 | IPYTHONDIR directory, in 'rotate' mode (see below). |
|
639 | IPYTHON_DIR directory, in 'rotate' mode (see below). | |
640 |
|
640 | |||
641 | '%logstart name' saves to file 'name' in 'backup' mode. It saves your |
|
641 | '%logstart name' saves to file 'name' in 'backup' mode. It saves your | |
642 | history up to that point and then continues logging. |
|
642 | history up to that point and then continues logging. |
@@ -13,5 +13,7 b' Using IPython for parallel computing' | |||||
13 | parallel_task.txt |
|
13 | parallel_task.txt | |
14 | parallel_mpi.txt |
|
14 | parallel_mpi.txt | |
15 | parallel_security.txt |
|
15 | parallel_security.txt | |
|
16 | parallel_winhpc.txt | |||
|
17 | parallel_demos.txt | |||
16 |
|
18 | |||
17 |
|
19 |
@@ -169,9 +169,9 b" if 'setuptools' in sys.modules:" | |||||
169 | 'console_scripts': [ |
|
169 | 'console_scripts': [ | |
170 | 'ipython = IPython.core.ipapp:launch_new_instance', |
|
170 | 'ipython = IPython.core.ipapp:launch_new_instance', | |
171 | 'pycolor = IPython.utils.PyColorize:main', |
|
171 | 'pycolor = IPython.utils.PyColorize:main', | |
172 |
'ipcontroller = IPython.kernel. |
|
172 | 'ipcontroller = IPython.kernel.ipcontrollerapp:launch_new_instance', | |
173 |
'ipengine = IPython.kernel. |
|
173 | 'ipengine = IPython.kernel.ipengineapp:launch_new_instance', | |
174 |
'ipcluster = IPython.kernel. |
|
174 | 'ipcluster = IPython.kernel.ipclusterapp:launch_new_instance', | |
175 | 'ipythonx = IPython.frontend.wx.ipythonx:main', |
|
175 | 'ipythonx = IPython.frontend.wx.ipythonx:main', | |
176 | 'iptest = IPython.testing.iptest:main', |
|
176 | 'iptest = IPython.testing.iptest:main', | |
177 | 'irunner = IPython.lib.irunner:main' |
|
177 | 'irunner = IPython.lib.irunner:main' |
@@ -119,8 +119,8 b' def find_packages():' | |||||
119 | add_package(packages, 'frontend.wx') |
|
119 | add_package(packages, 'frontend.wx') | |
120 | add_package(packages, 'gui') |
|
120 | add_package(packages, 'gui') | |
121 | add_package(packages, 'gui.wx') |
|
121 | add_package(packages, 'gui.wx') | |
122 |
add_package(packages, 'kernel', config= |
|
122 | add_package(packages, 'kernel', config=False, tests=True, scripts=True) | |
123 |
add_package(packages, 'kernel.core', config= |
|
123 | add_package(packages, 'kernel.core', config=False, tests=True) | |
124 | add_package(packages, 'lib', tests=True) |
|
124 | add_package(packages, 'lib', tests=True) | |
125 | add_package(packages, 'quarantine', tests=True) |
|
125 | add_package(packages, 'quarantine', tests=True) | |
126 | add_package(packages, 'scripts') |
|
126 | add_package(packages, 'scripts') |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed | ||
This diff has been collapsed as it changes many lines, (813 lines changed) Show them Hide them |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
General Comments 0
You need to be logged in to leave comments.
Login now