##// END OF EJS Templates
reorganize default config files to match profiles as directories...
MinRK -
Show More
@@ -0,0 +1,5 b''
1 This is the IPython directory.
2
3 For more information on configuring IPython, do:
4
5 ipython config -h
@@ -0,0 +1,24 b''
1 c = get_config()
2
3 # This can be used at any point in a config file to load a sub config
4 # and merge it into the current one.
5 load_subconfig('ipython_config.py')
6
7 lines = """
8 from IPython.kernel.client import *
9 """
10
11 # You have to make sure that attributes that are containers already
12 # exist before using them. Simply assigning a new list will override
13 # all previous values.
14 if hasattr(c.Global, 'exec_lines'):
15 c.Global.exec_lines.append(lines)
16 else:
17 c.Global.exec_lines = [lines]
18
19 # Load the parallelmagic extension to enable %result, %px, %autopx magics.
20 if hasattr(c.Global, 'extensions'):
21 c.Global.extensions.append('parallelmagic')
22 else:
23 c.Global.extensions = ['parallelmagic']
24
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -0,0 +1,241 b''
1 import os
2
3 c = get_config()
4
5 #-----------------------------------------------------------------------------
6 # Select which launchers to use
7 #-----------------------------------------------------------------------------
8
9 # This allows you to control what method is used to start the controller
10 # and engines. The following methods are currently supported:
11 # - Start as a regular process on localhost.
12 # - Start using mpiexec.
13 # - Start using the Windows HPC Server 2008 scheduler
14 # - Start using PBS/SGE
15 # - Start using SSH
16
17
18 # The selected launchers can be configured below.
19
20 # Options are:
21 # - LocalControllerLauncher
22 # - MPIExecControllerLauncher
23 # - PBSControllerLauncher
24 # - SGEControllerLauncher
25 # - WindowsHPCControllerLauncher
26 # c.Global.controller_launcher = 'IPython.parallel.apps.launcher.LocalControllerLauncher'
27 # c.Global.controller_launcher = 'IPython.parallel.apps.launcher.PBSControllerLauncher'
28
29 # Options are:
30 # - LocalEngineSetLauncher
31 # - MPIExecEngineSetLauncher
32 # - PBSEngineSetLauncher
33 # - SGEEngineSetLauncher
34 # - WindowsHPCEngineSetLauncher
35 # c.Global.engine_launcher = 'IPython.parallel.apps.launcher.LocalEngineSetLauncher'
36
37 #-----------------------------------------------------------------------------
38 # Global configuration
39 #-----------------------------------------------------------------------------
40
41 # The default number of engines that will be started. This is overridden by
42 # the -n command line option: "ipcluster start -n 4"
43 # c.Global.n = 2
44
45 # Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
46 # c.Global.log_to_file = False
47
48 # Remove old logs from cluster_dir/log before starting.
49 # c.Global.clean_logs = True
50
51 # The working directory for the process. The application will use os.chdir
52 # to change to this directory before starting.
53 # c.Global.work_dir = os.getcwd()
54
55
56 #-----------------------------------------------------------------------------
57 # Local process launchers
58 #-----------------------------------------------------------------------------
59
60 # The command line arguments to call the controller with.
61 # c.LocalControllerLauncher.controller_args = \
62 # ['--log-to-file','--log-level', '40']
63
64 # The working directory for the controller
65 # c.LocalEngineSetLauncher.work_dir = u''
66
67 # Command line argument passed to the engines.
68 # c.LocalEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
69
70 #-----------------------------------------------------------------------------
71 # MPIExec launchers
72 #-----------------------------------------------------------------------------
73
74 # The mpiexec/mpirun command to use in both the controller and engines.
75 # c.MPIExecLauncher.mpi_cmd = ['mpiexec']
76
77 # Additional arguments to pass to the actual mpiexec command.
78 # c.MPIExecLauncher.mpi_args = []
79
80 # The mpiexec/mpirun command and args can be overridden if they should be different
81 # for controller and engines.
82 # c.MPIExecControllerLauncher.mpi_cmd = ['mpiexec']
83 # c.MPIExecControllerLauncher.mpi_args = []
84 # c.MPIExecEngineSetLauncher.mpi_cmd = ['mpiexec']
85 # c.MPIExecEngineSetLauncher.mpi_args = []
86
87 # The command line argument to call the controller with.
88 # c.MPIExecControllerLauncher.controller_args = \
89 # ['--log-to-file','--log-level', '40']
90
91 # Command line argument passed to the engines.
92 # c.MPIExecEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
93
94 # The default number of engines to start if not given elsewhere.
95 # c.MPIExecEngineSetLauncher.n = 1
96
97 #-----------------------------------------------------------------------------
98 # SSH launchers
99 #-----------------------------------------------------------------------------
100
101 # ipclusterz can be used to launch controller and engines remotely via ssh.
102 # Note that currently ipclusterz does not do any file distribution, so if
103 # machines are not on a shared filesystem, config and json files must be
104 # distributed. For this reason, the reuse_files defaults to True on an
105 # ssh-launched Controller. This flag can be overridden by the program_args
106 # attribute of c.SSHControllerLauncher.
107
108 # set the ssh cmd for launching remote commands. The default is ['ssh']
109 # c.SSHLauncher.ssh_cmd = ['ssh']
110
111 # set the ssh cmd for launching remote commands. The default is ['ssh']
112 # c.SSHLauncher.ssh_args = ['tt']
113
114 # Set the user and hostname for the controller
115 # c.SSHControllerLauncher.hostname = 'controller.example.com'
116 # c.SSHControllerLauncher.user = os.environ.get('USER','username')
117
118 # Set the arguments to be passed to ipcontrollerz
119 # note that remotely launched ipcontrollerz will not get the contents of
120 # the local ipcontrollerz_config.py unless it resides on the *remote host*
121 # in the location specified by the --cluster_dir argument.
122 # c.SSHControllerLauncher.program_args = ['-r', '-ip', '0.0.0.0', '--cluster_dir', '/path/to/cd']
123
124 # Set the default args passed to ipenginez for SSH launched engines
125 # c.SSHEngineSetLauncher.engine_args = ['--mpi', 'mpi4py']
126
127 # SSH engines are launched as a dict of locations/n-engines.
128 # if a value is a tuple instead of an int, it is assumed to be of the form
129 # (n, [args]), setting the arguments to be passed to ipenginez on `host`.
130 # otherwise, c.SSHEngineSetLauncher.engine_args will be used as the default.
131
132 # In this case, there will be 3 engines at my.example.com, and
133 # 2 at you@ipython.scipy.org with a special json connector location.
134 # c.SSHEngineSetLauncher.engines = {'my.example.com' : 3,
135 # 'you@ipython.scipy.org' : (2, ['-f', '/path/to/ipcontroller-engine.json']}
136 # }
137
138 #-----------------------------------------------------------------------------
139 # Unix batch (PBS) schedulers launchers
140 #-----------------------------------------------------------------------------
141
142 # SGE and PBS are very similar. All configurables in this section called 'PBS*'
143 # also exist as 'SGE*'.
144
145 # The command line program to use to submit a PBS job.
146 # c.PBSLauncher.submit_command = ['qsub']
147
148 # The command line program to use to delete a PBS job.
149 # c.PBSLauncher.delete_command = ['qdel']
150
151 # The PBS queue in which the job should run
152 # c.PBSLauncher.queue = 'myqueue'
153
154 # A regular expression that takes the output of qsub and finds the job id.
155 # c.PBSLauncher.job_id_regexp = r'\d+'
156
157 # If for some reason the Controller and Engines have different options above, they
158 # can be set as c.PBSControllerLauncher.<option> etc.
159
160 # PBS and SGE have default templates, but you can specify your own, either as strings
161 # or from files, as described here:
162
163 # The batch submission script used to start the controller. This is where
164 # environment variables would be setup, etc. This string is interpreted using
165 # the Itpl module in IPython.external. Basically, you can use ${n} for the
166 # number of engines and ${cluster_dir} for the cluster_dir.
167 # c.PBSControllerLauncher.batch_template = """
168 # #PBS -N ipcontroller
169 # #PBS -q $queue
170 #
171 # ipcontrollerz --cluster-dir $cluster_dir
172 # """
173
174 # You can also load this template from a file
175 # c.PBSControllerLauncher.batch_template_file = u"/path/to/my/template.sh"
176
177 # The name of the instantiated batch script that will actually be used to
178 # submit the job. This will be written to the cluster directory.
179 # c.PBSControllerLauncher.batch_file_name = u'pbs_controller'
180
181 # The batch submission script used to start the engines. This is where
182 # environment variables would be setup, etc. This string is interpreted using
183 # the Itpl module in IPython.external. Basically, you can use ${n} for the
184 # number of engines and ${cluster_dir} for the cluster_dir.
185 # c.PBSEngineSetLauncher.batch_template = """
186 # #PBS -N ipengine
187 # #PBS -l nprocs=$n
188 #
189 # ipenginez --cluster-dir $cluster_dir
190 # """
191
192 # You can also load this template from a file
193 # c.PBSEngineSetLauncher.batch_template_file = u"/path/to/my/template.sh"
194
195 # The name of the instantiated batch script that will actually be used to
196 # submit the job. This will be written to the cluster directory.
197 # c.PBSEngineSetLauncher.batch_file_name = u'pbs_engines'
198
199
200
201 #-----------------------------------------------------------------------------
202 # Windows HPC Server 2008 launcher configuration
203 #-----------------------------------------------------------------------------
204
205 # c.IPControllerJob.job_name = 'IPController'
206 # c.IPControllerJob.is_exclusive = False
207 # c.IPControllerJob.username = r'USERDOMAIN\USERNAME'
208 # c.IPControllerJob.priority = 'Highest'
209 # c.IPControllerJob.requested_nodes = ''
210 # c.IPControllerJob.project = 'MyProject'
211
212 # c.IPControllerTask.task_name = 'IPController'
213 # c.IPControllerTask.controller_cmd = [u'ipcontroller.exe']
214 # c.IPControllerTask.controller_args = ['--log-to-file', '--log-level', '40']
215 # c.IPControllerTask.environment_variables = {}
216
217 # c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
218 # c.WindowsHPCControllerLauncher.job_file_name = u'ipcontroller_job.xml'
219
220
221 # c.IPEngineSetJob.job_name = 'IPEngineSet'
222 # c.IPEngineSetJob.is_exclusive = False
223 # c.IPEngineSetJob.username = r'USERDOMAIN\USERNAME'
224 # c.IPEngineSetJob.priority = 'Highest'
225 # c.IPEngineSetJob.requested_nodes = ''
226 # c.IPEngineSetJob.project = 'MyProject'
227
228 # c.IPEngineTask.task_name = 'IPEngine'
229 # c.IPEngineTask.engine_cmd = [u'ipengine.exe']
230 # c.IPEngineTask.engine_args = ['--log-to-file', '--log-level', '40']
231 # c.IPEngineTask.environment_variables = {}
232
233 # c.WindowsHPCEngineSetLauncher.scheduler = 'HEADNODE'
234 # c.WindowsHPCEngineSetLauncher.job_file_name = u'ipengineset_job.xml'
235
236
237
238
239
240
241
@@ -0,0 +1,180 b''
1 from IPython.config.loader import Config
2
3 c = get_config()
4
5 #-----------------------------------------------------------------------------
6 # Global configuration
7 #-----------------------------------------------------------------------------
8
9 # Basic Global config attributes
10
11 # Start up messages are logged to stdout using the logging module.
12 # These all happen before the twisted reactor is started and are
13 # useful for debugging purposes. Can be (10=DEBUG,20=INFO,30=WARN,40=ERROR)
14 # and smaller is more verbose.
15 # c.Global.log_level = 20
16
17 # Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
18 # c.Global.log_to_file = False
19
20 # Remove old logs from cluster_dir/log before starting.
21 # c.Global.clean_logs = True
22
23 # A list of Python statements that will be run before starting the
24 # controller. This is provided because occasionally certain things need to
25 # be imported in the controller for pickling to work.
26 # c.Global.import_statements = ['import math']
27
28 # Reuse the controller's JSON files. If False, JSON files are regenerated
29 # each time the controller is run. If True, they will be reused, *but*, you
30 # also must set the network ports by hand. If set, this will override the
31 # values set for the client and engine connections below.
32 # c.Global.reuse_files = True
33
34 # Enable exec_key authentication on all messages. Default is True
35 # c.Global.secure = True
36
37 # The working directory for the process. The application will use os.chdir
38 # to change to this directory before starting.
39 # c.Global.work_dir = os.getcwd()
40
41 # The log url for logging to an `iploggerz` application. This will override
42 # log-to-file.
43 # c.Global.log_url = 'tcp://127.0.0.1:20202'
44
45 # The specific external IP that is used to disambiguate multi-interface URLs.
46 # The default behavior is to guess from external IPs gleaned from `socket`.
47 # c.Global.location = '192.168.1.123'
48
49 # The ssh server remote clients should use to connect to this controller.
50 # It must be a machine that can see the interface specified in client_ip.
51 # The default for client_ip is localhost, in which case the sshserver must
52 # be an external IP of the controller machine.
53 # c.Global.sshserver = 'controller.example.com'
54
55 # the url to use for registration. If set, this overrides engine-ip,
56 # engine-transport client-ip,client-transport, and regport.
57 # c.RegistrationFactory.url = 'tcp://*:12345'
58
59 # the port to use for registration. Clients and Engines both use this
60 # port for registration.
61 # c.RegistrationFactory.regport = 10101
62
63 #-----------------------------------------------------------------------------
64 # Configure the Task Scheduler
65 #-----------------------------------------------------------------------------
66
67 # The routing scheme. 'pure' will use the pure-ZMQ scheduler. Any other
68 # value will use a Python scheduler with various routing schemes.
69 # python schemes are: lru, weighted, random, twobin. Default is 'weighted'.
70 # Note that the pure ZMQ scheduler does not support many features, such as
71 # dying engines, dependencies, or engine-subset load-balancing.
72 # c.ControllerFactory.scheme = 'pure'
73
74 # The Python scheduler can limit the number of outstanding tasks per engine
75 # by using an HWM option. This allows engines with long-running tasks
76 # to not steal too many tasks from other engines. The default is 0, which
77 # means aggressively distribute messages, never waiting for them to finish.
78 # c.TaskScheduler.hwm = 0
79
80 # Whether to use Threads or Processes to start the Schedulers. Threads will
81 # use less resources, but potentially reduce throughput. Default is to
82 # use processes. Note that a Python scheduler will always be in a Process.
83 # c.ControllerFactory.usethreads
84
85 #-----------------------------------------------------------------------------
86 # Configure the Hub
87 #-----------------------------------------------------------------------------
88
89 # Which class to use for the db backend. Currently supported are DictDB (the
90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
91 # slow-down the Hub's responsiveness, but also reduce its memory footprint.
92 # c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
93
94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
95 # Hub pings engines for heartbeats. This determines how quickly the Hub
96 # will react to engines coming and going. A lower number means faster response
97 # time, but more network activity. The default is 100ms
98 # c.HubFactory.ping = 100
99
100 # HubFactory queue port pairs, to set by name: mux, iopub, control, task. Set
101 # each as a tuple of length 2 of ints. The default is to find random
102 # available ports
103 # c.HubFactory.mux = (10102,10112)
104
105 #-----------------------------------------------------------------------------
106 # Configure the client connections
107 #-----------------------------------------------------------------------------
108
109 # Basic client connection config attributes
110
111 # The network interface the controller will listen on for client connections.
112 # This should be an IP address or interface on the controller. An asterisk
113 # means listen on all interfaces. The transport can be any transport
114 # supported by zeromq (tcp,epgm,pgm,ib,ipc):
115 # c.HubFactory.client_ip = '*'
116 # c.HubFactory.client_transport = 'tcp'
117
118 # individual client ports to configure by name: query_port, notifier_port
119 # c.HubFactory.query_port = 12345
120
121 #-----------------------------------------------------------------------------
122 # Configure the engine connections
123 #-----------------------------------------------------------------------------
124
125 # Basic config attributes for the engine connections.
126
127 # The network interface the controller will listen on for engine connections.
128 # This should be an IP address or interface on the controller. An asterisk
129 # means listen on all interfaces. The transport can be any transport
130 # supported by zeromq (tcp,epgm,pgm,ib,ipc):
131 # c.HubFactory.engine_ip = '*'
132 # c.HubFactory.engine_transport = 'tcp'
133
134 # set the engine heartbeat ports to use:
135 # c.HubFactory.hb = (10303,10313)
136
137 #-----------------------------------------------------------------------------
138 # Configure the TaskRecord database backend
139 #-----------------------------------------------------------------------------
140
141 # For memory/persistence reasons, tasks can be stored out-of-memory in a database.
142 # Currently, only sqlite and mongodb are supported as backends, but the interface
143 # is fairly simple, so advanced developers could write their own backend.
144
145 # ----- in-memory configuration --------
146 # this line restores the default behavior: in-memory storage of all results.
147 # c.HubFactory.db_class = 'IPython.parallel.controller.dictdb.DictDB'
148
149 # ----- sqlite configuration --------
150 # use this line to activate sqlite:
151 # c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'
152
153 # You can specify the name of the db-file. By default, this will be located
154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
155 # c.SQLiteDB.filename = 'tasks.db'
156
157 # You can also specify the location of the db-file, if you want it to be somewhere
158 # other than the cluster_dir.
159 # c.SQLiteDB.location = '/scratch/'
160
161 # This will specify the name of the table for the controller to use. The default
162 # behavior is to use the session ID of the SessionFactory object (a uuid). Overriding
163 # this will result in results persisting for multiple sessions.
164 # c.SQLiteDB.table = 'results'
165
166 # ----- mongodb configuration --------
167 # use this line to activate mongodb:
168 # c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
169
170 # You can specify the args and kwargs pymongo will use when creating the Connection.
171 # For more information on what these options might be, see pymongo documentation.
172 # c.MongoDB.connection_kwargs = {}
173 # c.MongoDB.connection_args = []
174
175 # This will specify the name of the mongo database for the controller to use. The default
176 # behavior is to use the session ID of the SessionFactory object (a uuid). Overriding
177 # this will result in task results persisting through multiple sessions.
178 # c.MongoDB.database = 'ipythondb'
179
180
@@ -0,0 +1,85 b''
1 c = get_config()
2
3 #-----------------------------------------------------------------------------
4 # Global configuration
5 #-----------------------------------------------------------------------------
6
7 # Start up messages are logged to stdout using the logging module.
8 # These all happen before the twisted reactor is started and are
9 # useful for debugging purposes. Can be (10=DEBUG,20=INFO,30=WARN,40=ERROR)
10 # and smaller is more verbose.
11 # c.Global.log_level = 20
12
13 # Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
14 # c.Global.log_to_file = False
15
16 # Remove old logs from cluster_dir/log before starting.
17 # c.Global.clean_logs = True
18
19 # A list of strings that will be executed in the users namespace on the engine
20 # before it connects to the controller.
21 # c.Global.exec_lines = ['import numpy']
22
23 # The engine will try to connect to the controller multiple times, to allow
24 # the controller time to startup and write its JSON file. These parameters
25 # control the number of retries (connect_max_tries) and the initial delay
26 # (connect_delay) between attempts. The actual delay between attempts gets
27 # longer each time by a factor of 1.5 (delay[i] = 1.5*delay[i-1])
28 # those attempts.
29 # c.Global.connect_delay = 0.1
30 # c.Global.connect_max_tries = 15
31
32 # By default, the engine will look for the controller's JSON file in its own
33 # cluster directory. Sometimes, the JSON file will be elsewhere and this
34 # attribute can be set to the full path of the JSON file.
35 # c.Global.url_file = u'/path/to/my/ipcontroller-engine.json'
36
37 # The working directory for the process. The application will use os.chdir
38 # to change to this directory before starting.
39 # c.Global.work_dir = os.getcwd()
40
41 #-----------------------------------------------------------------------------
42 # MPI configuration
43 #-----------------------------------------------------------------------------
44
45 # Upon starting the engine can be configured to call MPI_Init. This section
46 # configures that.
47
48 # Select which MPI section to execute to setup MPI. The value of this
49 # attribute must match the name of another attribute in the MPI config
50 # section (mpi4py, pytrilinos, etc.). This can also be set by the --mpi
51 # command line option.
52 # c.MPI.use = ''
53
54 # Initialize MPI using mpi4py. To use this, set c.MPI.use = 'mpi4py' or use
55 # --mpi=mpi4py at the command line.
56 # c.MPI.mpi4py = """from mpi4py import MPI as mpi
57 # mpi.size = mpi.COMM_WORLD.Get_size()
58 # mpi.rank = mpi.COMM_WORLD.Get_rank()
59 # """
60
61 # Initialize MPI using pytrilinos. To use this, set c.MPI.use = 'pytrilinos'
62 # or use --mpi=pytrilinos at the command line.
63 # c.MPI.pytrilinos = """from PyTrilinos import Epetra
64 # class SimpleStruct:
65 # pass
66 # mpi = SimpleStruct()
67 # mpi.rank = 0
68 # mpi.size = 0
69 # """
70
71 #-----------------------------------------------------------------------------
72 # Developer level configuration attributes
73 #-----------------------------------------------------------------------------
74
75 # You shouldn't have to modify anything in this section. These attributes
76 # are more for developers who want to change the behavior of the controller
77 # at a fundamental level.
78
79 # You should not have to change these attributes.
80
81 # c.Global.url_file_name = u'ipcontroller-engine.furl'
82
83
84
85
@@ -0,0 +1,165 b''
1 # Get the config being loaded so we can set attributes on it
2 c = get_config()
3
4 #-----------------------------------------------------------------------------
5 # Global options
6 #-----------------------------------------------------------------------------
7
8 # c.Global.display_banner = True
9
10 # c.Global.classic = False
11
12 # c.Global.nosep = True
13
14 # If you still use multiple versions of IPython on the same machine,
15 # set this to True to suppress warnings about old configuration files
16 # c.Global.ignore_old_config = False
17
18 # Set this to determine the detail of what is logged at startup.
19 # The default is 30 and possible values are 0,10,20,30,40,50.
20 # c.Global.log_level = 20
21
22 # This should be a list of importable Python modules that have a
23 # load_ipython_extension(ip) method. This method gets called when the extension
24 # is loaded. You can put your extensions anywhere they can be imported
25 # but we add the extensions subdir of the ipython directory to sys.path
26 # during extension loading, so you can put them there as well.
27 # c.Global.extensions = [
28 # 'myextension'
29 # ]
30
31 # These lines are run in IPython in the user's namespace after extensions
32 # are loaded. They can contain full IPython syntax with magics etc.
33 # c.Global.exec_lines = [
34 # 'import numpy',
35 # 'a = 10; b = 20',
36 # '1/0'
37 # ]
38
39 # These files are run in IPython in the user's namespace. Files with a .py
40 # extension need to be pure Python. Files with a .ipy extension can have
41 # custom IPython syntax (like magics, etc.).
42 # These files need to be in the cwd, the ipython_dir or be absolute paths.
43 # c.Global.exec_files = [
44 # 'mycode.py',
45 # 'fancy.ipy'
46 # ]
47
48 #-----------------------------------------------------------------------------
49 # InteractiveShell options
50 #-----------------------------------------------------------------------------
51
52 # c.InteractiveShell.autocall = 1
53
54 # c.TerminalInteractiveShell.autoedit_syntax = False
55
56 # c.InteractiveShell.autoindent = True
57
58 # c.InteractiveShell.automagic = False
59
60 # c.TerminalInteractiveShell.banner1 = 'This is for overriding the default IPython banner'
61
62 # c.TerminalInteractiveShell.banner2 = "This is for extra banner text"
63
64 # c.InteractiveShell.cache_size = 1000
65
66 # c.InteractiveShell.colors = 'LightBG'
67
68 # c.InteractiveShell.color_info = True
69
70 # c.TerminalInteractiveShell.confirm_exit = True
71
72 # c.InteractiveShell.deep_reload = False
73
74 # c.TerminalInteractiveShell.editor = 'nano'
75
76 # c.InteractiveShell.logstart = True
77
78 # c.InteractiveShell.logfile = u'ipython_log.py'
79
80 # c.InteractiveShell.logappend = u'mylog.py'
81
82 # c.InteractiveShell.object_info_string_level = 0
83
84 # c.TerminalInteractiveShell.pager = 'less'
85
86 # c.InteractiveShell.pdb = False
87
88 # c.InteractiveShell.prompt_in1 = 'In [\#]: '
89 # c.InteractiveShell.prompt_in2 = ' .\D.: '
90 # c.InteractiveShell.prompt_out = 'Out[\#]: '
91 # c.InteractiveShell.prompts_pad_left = True
92
93 # c.InteractiveShell.quiet = False
94
95 # c.InteractiveShell.history_length = 10000
96
97 # Readline
98 # c.InteractiveShell.readline_use = True
99
100 # be careful with meta-key ('\M-<x>') bindings, because
101 # they conflict with 8-bit encodings (e.g. UTF8)
102
103 # c.InteractiveShell.readline_parse_and_bind = [
104 # 'tab: complete',
105 # '"\C-l": possible-completions',
106 # 'set show-all-if-ambiguous on',
107 # '"\C-o": tab-insert',
108 # '"\C-r": reverse-search-history',
109 # '"\C-s": forward-search-history',
110 # '"\C-p": history-search-backward',
111 # '"\C-n": history-search-forward',
112 # '"\e[A": history-search-backward',
113 # '"\e[B": history-search-forward',
114 # '"\C-k": kill-line',
115 # '"\C-u": unix-line-discard',
116 # ]
117 # c.InteractiveShell.readline_remove_delims = '-/~'
118 # c.InteractiveShell.readline_merge_completions = True
119 # c.InteractiveShell.readline_omit__names = 0
120
121 # c.TerminalInteractiveShell.screen_length = 0
122
123 # c.InteractiveShell.separate_in = '\n'
124 # c.InteractiveShell.separate_out = ''
125 # c.InteractiveShell.separate_out2 = ''
126
127 # c.TerminalInteractiveShell.term_title = False
128
129 # c.InteractiveShell.wildcards_case_sensitive = True
130
131 # c.InteractiveShell.xmode = 'Context'
132
133 #-----------------------------------------------------------------------------
134 # Formatter and display options
135 #-----------------------------------------------------------------------------
136
137 # c.PlainTextFormatter.pprint = True
138
139 #-----------------------------------------------------------------------------
140 # PrefilterManager options
141 #-----------------------------------------------------------------------------
142
143 # c.PrefilterManager.multi_line_specials = True
144
145 #-----------------------------------------------------------------------------
146 # AliasManager options
147 #-----------------------------------------------------------------------------
148
149 # Do this to disable all defaults
150 # c.AliasManager.default_aliases = []
151
152 # c.AliasManager.user_aliases = [
153 # ('foo', 'echo Hi')
154 # ]
155
156 #-----------------------------------------------------------------------------
157 # HistoryManager options
158 #-----------------------------------------------------------------------------
159
160 # Enable logging output as well as input to the database.
161 # c.HistoryManager.db_log_output = False
162
163 # Only write to the database every n commands - this can save disk
164 # access (and hence power) over the default of writing on every command.
165 # c.HistoryManager.db_cache_size = 0
@@ -0,0 +1,19 b''
1 c = get_config()
2
3 # This can be used at any point in a config file to load a sub config
4 # and merge it into the current one.
5 load_subconfig('ipython_config.py')
6
7 lines = """
8 import cmath
9 from math import *
10 """
11
12 # You have to make sure that attributes that are containers already
13 # exist before using them. Simply assigning a new list will override
14 # all previous values.
15 if hasattr(c.Global, 'exec_lines'):
16 c.Global.exec_lines.append(lines)
17 else:
18 c.Global.exec_lines = [lines]
19
@@ -0,0 +1,22 b''
1 c = get_config()
2
3 # This can be used at any point in a config file to load a sub config
4 # and merge it into the current one.
5 load_subconfig('ipython_config.py')
6
7 lines = """
8 import matplotlib
9 %gui -a wx
10 matplotlib.use('wxagg')
11 matplotlib.interactive(True)
12 from matplotlib import pyplot as plt
13 from matplotlib.pyplot import *
14 """
15
16 # You have to make sure that attributes that are containers already
17 # exist before using them. Simply assigning a new list will override
18 # all previous values.
19 if hasattr(c.Global, 'exec_lines'):
20 c.Global.exec_lines.append(lines)
21 else:
22 c.Global.exec_lines = [lines] No newline at end of file
@@ -0,0 +1,29 b''
1 c = get_config()
2
3 # This can be used at any point in a config file to load a sub config
4 # and merge it into the current one.
5 load_subconfig('ipython_config.py')
6
7 c.InteractiveShell.prompt_in1 = '\C_LightGreen\u@\h\C_LightBlue[\C_LightCyan\Y1\C_LightBlue]\C_Green|\#> '
8 c.InteractiveShell.prompt_in2 = '\C_Green|\C_LightGreen\D\C_Green> '
9 c.InteractiveShell.prompt_out = '<\#> '
10
11 c.InteractiveShell.prompts_pad_left = True
12
13 c.InteractiveShell.separate_in = ''
14 c.InteractiveShell.separate_out = ''
15 c.InteractiveShell.separate_out2 = ''
16
17 c.PrefilterManager.multi_line_specials = True
18
19 lines = """
20 %rehashx
21 """
22
23 # You have to make sure that attributes that are containers already
24 # exist before using them. Simply assigning a new list will override
25 # all previous values.
26 if hasattr(c.Global, 'exec_lines'):
27 c.Global.exec_lines.append(lines)
28 else:
29 c.Global.exec_lines = [lines] No newline at end of file
@@ -0,0 +1,29 b''
1 c = get_config()
2
3 # This can be used at any point in a config file to load a sub config
4 # and merge it into the current one.
5 load_subconfig('ipython_config.py')
6
7 lines = """
8 from __future__ import division
9 from sympy import *
10 x, y, z = symbols('xyz')
11 k, m, n = symbols('kmn', integer=True)
12 f, g, h = map(Function, 'fgh')
13 """
14
15 # You have to make sure that attributes that are containers already
16 # exist before using them. Simply assigning a new list will override
17 # all previous values.
18
19 if hasattr(c.Global, 'exec_lines'):
20 c.Global.exec_lines.append(lines)
21 else:
22 c.Global.exec_lines = [lines]
23
24 # Load the sympy_printing extension to enable nice printing of sympy expr's.
25 if hasattr(c.Global, 'extensions'):
26 c.Global.extensions.append('sympy_printing')
27 else:
28 c.Global.extensions = ['sympy_printing']
29
@@ -150,8 +150,8 b' def find_package_data():'
150 # This is not enough for these things to appear in an sdist.
150 # This is not enough for these things to appear in an sdist.
151 # We need to muck with the MANIFEST to get this to work
151 # We need to muck with the MANIFEST to get this to work
152 package_data = {
152 package_data = {
153 'IPython.config.userconfig' : ['*'],
153 'IPython.config.profile' : ['README', '*/*.py'],
154 'IPython.testing' : ['*.txt']
154 'IPython.testing' : ['*.txt'],
155 }
155 }
156 return package_data
156 return package_data
157
157
General Comments 0
You need to be logged in to leave comments. Login now