diff --git a/IPython/config/application.py b/IPython/config/application.py
index 05022e0..5679b7d 100644
--- a/IPython/config/application.py
+++ b/IPython/config/application.py
@@ -19,10 +19,11 @@ Authors:
 # Imports
 #-----------------------------------------------------------------------------
 
-from copy import deepcopy
 import logging
+import os
 import re
 import sys
+from copy import deepcopy
 
 from IPython.config.configurable import SingletonConfigurable
 from IPython.config.loader import (
@@ -98,9 +99,13 @@ class Application(SingletonConfigurable):
     version = Unicode(u'0.0')
 
     # The log level for the application
-    log_level = Enum((0,10,20,30,40,50), default_value=logging.WARN,
-                    config=True,
-                    help="Set the log level.")
+    log_level = Enum((0,10,20,30,40,50,'DEBUG','INFO','WARN','ERROR','CRITICAL'),
+                    default_value=logging.WARN,
+                    config=True,
+                    help="Set the log level by value or name.")
+    def _log_level_changed(self, name, old, new):
+        if isinstance(new, basestring):
+            self.log_level = getattr(logging, new)
 
     # the alias map for configurables
     aliases = Dict(dict(log_level='Application.log_level'))
@@ -336,6 +341,16 @@ class Application(SingletonConfigurable):
         loader = PyFileConfigLoader(filename, path=path)
         config = loader.load_config()
         self.update_config(config)
+
+    def generate_config_file(self):
+        """generate default config file from Configurables"""
+        lines = ["# Configuration file for %s."%self.name]
+        lines.append('')
+        lines.append('c = get_config()')
+        lines.append('')
+        for cls in self.classes:
+            lines.append(cls.class_config_section())
+        return '\n'.join(lines)
 
     def exit(self, exit_status=0):
         self.log.debug("Exiting application: %s" % self.name)
diff --git a/IPython/config/configurable.py b/IPython/config/configurable.py
index 94a7595..61a3ee2 100755
--- a/IPython/config/configurable.py
+++ b/IPython/config/configurable.py
@@ -180,6 +180,38 @@ class Configurable(HasTraits):
         """Get the help string for a single trait and print it."""
         print cls.class_get_help()
 
+    @classmethod
+    def class_config_section(cls):
+        """Get the config class config section"""
+        def c(s):
+            """return a commented, wrapped block."""
+            s = '\n\n'.join(wrap_paragraphs(s, 78))
+
+            return '# ' + s.replace('\n', '\n# ')
+
+        # section header
+        breaker = '#' + '-'*78
+        s = "# %s configuration"%cls.__name__
+        lines = [breaker, s, breaker, '']
+        # get the description trait
+        desc = cls.class_traits().get('description')
+        if desc:
+            desc = desc.default_value
+        else:
+            # no description trait, use __doc__
+            desc = getattr(cls, '__doc__', '')
+        if desc:
+            lines.append(c(desc))
+            lines.append('')
+
+        for name,trait in cls.class_traits(config=True).iteritems():
+            help = trait.get_metadata('help') or ''
+            lines.append(c(help))
+            lines.append('# c.%s.%s = %r'%(cls.__name__, name, trait.get_default_value()))
+            lines.append('')
+        return '\n'.join(lines)
+
+
 
 class SingletonConfigurable(Configurable):
     """A configurable that only allows one instance.
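The hunks above add two related pieces: Application.generate_config_file() builds a default config file by concatenating class_config_section() for every class in self.classes, and log_level now accepts level names ('DEBUG', 'INFO', ...) in addition to numbers, with _log_level_changed mapping a name back to the numeric logging constant. A minimal usage sketch, not part of the patch: Bar and Foo below are hypothetical, and it assumes the 0.11-era IPython.config / IPython.utils.traitlets API, with wrap_paragraphs importable from configurable.py.

# Hypothetical illustration only; Bar and Foo are not part of this patch.
import logging

from IPython.config.application import Application
from IPython.config.configurable import Configurable
from IPython.utils.traitlets import Int, Unicode

class Bar(Configurable):
    """A toy configurable."""
    enabled = Int(1, config=True, help="Turn the feature on or off.")
    label = Unicode(u'bar', config=True, help="A display name.")

class Foo(Application):
    name = Unicode(u'foo')

app = Foo()
app.classes = [Bar]

# Level names are now legal values; the _log_level_changed handler
# converts them to the matching logging constant (here 10).
app.log_level = 'DEBUG'
assert app.log_level == logging.DEBUG

# Emits a fully commented default config file, one section per class.
print app.generate_config_file()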
@@ -280,4 +312,4 @@ class LoggingConfigurable(Configurable):
 
         from IPython.config.application import Application
         return Application.instance().log
-    
\ No newline at end of file
+    
diff --git a/IPython/config/profile/default/__init__.py b/IPython/config/profile/default/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/IPython/config/profile/default/__init__.py
+++ /dev/null
diff --git a/IPython/config/profile/default/ipcluster_config.py b/IPython/config/profile/default/ipcluster_config.py
deleted file mode 100644
index d345fd0..0000000
--- a/IPython/config/profile/default/ipcluster_config.py
+++ /dev/null
@@ -1,242 +0,0 @@
-import os
-
-c = get_config()
-
-#-----------------------------------------------------------------------------
-# Select which launchers to use
-#-----------------------------------------------------------------------------
-
-# This allows you to control what method is used to start the controller
-# and engines. The following methods are currently supported:
-# - Start as a regular process on localhost.
-# - Start using mpiexec.
-# - Start using the Windows HPC Server 2008 scheduler
-# - Start using PBS/SGE
-# - Start using SSH
-
-
-# The selected launchers can be configured below.
-
-# Options are:
-# - LocalControllerLauncher
-# - MPIExecControllerLauncher
-# - PBSControllerLauncher
-# - SGEControllerLauncher
-# - WindowsHPCControllerLauncher
-# c.IPClusterStartApp.controller_launcher = 'IPython.parallel.apps.launcher.LocalControllerLauncher'
-# c.IPClusterStartApp.controller_launcher = 'IPython.parallel.apps.launcher.PBSControllerLauncher'
-
-# Options are:
-# - LocalEngineSetLauncher
-# - MPIExecEngineSetLauncher
-# - PBSEngineSetLauncher
-# - SGEEngineSetLauncher
-# - WindowsHPCEngineSetLauncher
-# c.IPClusterEnginesApp.engine_launcher = 'IPython.parallel.apps.launcher.LocalEngineSetLauncher'
-
-#-----------------------------------------------------------------------------
-# Application configuration
-#-----------------------------------------------------------------------------
-
-# The default number of engines that will be started. This is overridden by
-# the -n command line option: "ipcluster start -n 4"
-# c.IPClusterEnginesApp.n = 2
-
-# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
-# c.BaseParallelApp.log_to_file = False
-
-# Remove old logs from cluster_dir/log before starting.
-# c.BaseParallelApp.clean_logs = True
-
-# The working directory for the process. The application will use os.chdir
-# to change to this directory before starting.
-# c.BaseParallelApp.work_dir = os.getcwd()
-
-
-#-----------------------------------------------------------------------------
-# Local process launchers
-#-----------------------------------------------------------------------------
-
-# The command line arguments to call the controller with.
-# c.LocalControllerLauncher.controller_args = \
-#     ['--log-to-file','--log-level', '40']
-
-# The working directory for the controller
-# c.LocalEngineSetLauncher.work_dir = u''
-
-# Command line argument passed to the engines.
-# c.LocalEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
-
-#-----------------------------------------------------------------------------
-# MPIExec launchers
-#-----------------------------------------------------------------------------
-
-# The mpiexec/mpirun command to use in both the controller and engines.
-# c.MPIExecLauncher.mpi_cmd = ['mpiexec']
-
-# Additional arguments to pass to the actual mpiexec command.
-# c.MPIExecLauncher.mpi_args = []
-
-# The mpiexec/mpirun command and args can be overridden if they should be different
-# for controller and engines.
-# c.MPIExecControllerLauncher.mpi_cmd = ['mpiexec']
-# c.MPIExecControllerLauncher.mpi_args = []
-# c.MPIExecEngineSetLauncher.mpi_cmd = ['mpiexec']
-# c.MPIExecEngineSetLauncher.mpi_args = []
-
-# The command line argument to call the controller with.
-# c.MPIExecControllerLauncher.controller_args = \
-#     ['--log-to-file','--log-level', '40']
-
-# Command line argument passed to the engines.
-# c.MPIExecEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
-
-# The default number of engines to start if not given elsewhere.
-# c.MPIExecEngineSetLauncher.n = 1
-
-#-----------------------------------------------------------------------------
-# SSH launchers
-#-----------------------------------------------------------------------------
-
-# ipclusterz can be used to launch controller and engines remotely via ssh.
-# Note that currently ipclusterz does not do any file distribution, so if
-# machines are not on a shared filesystem, config and json files must be
-# distributed. For this reason, the reuse_files defaults to True on an
-# ssh-launched Controller. This flag can be overridded by the program_args
-# attribute of c.SSHControllerLauncher.
-
-# set the ssh cmd for launching remote commands. The default is ['ssh']
-# c.SSHLauncher.ssh_cmd = ['ssh']
-
-# set the ssh cmd for launching remote commands. The default is ['ssh']
-# c.SSHLauncher.ssh_args = ['tt']
-
-# Set the user and hostname for the controller
-# c.SSHControllerLauncher.hostname = 'controller.example.com'
-# c.SSHControllerLauncher.user = os.environ.get('USER','username')
-
-# Set the arguments to be passed to ipcontrollerz
-# note that remotely launched ipcontrollerz will not get the contents of
-# the local ipcontrollerz_config.py unless it resides on the *remote host*
-# in the location specified by the --cluster_dir argument.
-# c.SSHControllerLauncher.program_args = ['-r', '-ip', '0.0.0.0', '--cluster_dir', '/path/to/cd']
-
-# Set the default args passed to ipengine for SSH launched engines
-# c.SSHEngineSetLauncher.engine_args = ['--mpi', 'mpi4py']
-
-# SSH engines are launched as a dict of locations/n-engines.
-# if a value is a tuple instead of an int, it is assumed to be of the form
-# (n, [args]), setting the arguments to passed to ipengine on `host`.
-# otherwise, c.SSHEngineSetLauncher.engine_args will be used as the default.
-
-# In this case, there will be 3 engines at my.example.com, and
-# 2 at you@ipython.scipy.org with a special json connector location.
-# c.SSHEngineSetLauncher.engines = {'my.example.com' : 3,
-#                                   'you@ipython.scipy.org' : (2, ['-f', '/path/to/ipcontroller-engine.json']}
-# }
-
-#-----------------------------------------------------------------------------
-# Unix batch (PBS) schedulers launchers
-#-----------------------------------------------------------------------------
-
-# SGE and PBS are very similar. All configurables in this section called 'PBS*'
-# also exist as 'SGE*'.
-
-# The command line program to use to submit a PBS job.
-# c.PBSLauncher.submit_command = ['qsub']
-
-# The command line program to use to delete a PBS job.
-# c.PBSLauncher.delete_command = ['qdel']
-
-# The PBS queue in which the job should run
-# c.PBSLauncher.queue = 'myqueue'
-
-# A regular expression that takes the output of qsub and find the job id.
-# c.PBSLauncher.job_id_regexp = r'\d+'
-
-# If for some reason the Controller and Engines have different options above, they
-# can be set as c.PBSControllerLauncher.
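The static per-profile templates deleted above (ipcluster_config.py and the empty __init__.py) appear to be superseded by config files rendered at runtime: generate_config_file() emits one class_config_section() per registered class instead of shipping hand-written defaults. For the hypothetical Bar class sketched earlier, the generated section would look roughly like the following (the '#---' rule is '#' plus 78 dashes in the real output and is shortened here; trait order follows dict iteration, so it is not guaranteed):

#------------------------------------------------------------
# Bar configuration
#------------------------------------------------------------

# A toy configurable.

# Turn the feature on or off.
# c.Bar.enabled = 1

# A display name.
# c.Bar.label = u'bar'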