diff --git a/IPython/kernel/clusterdir.py b/IPython/kernel/clusterdir.py index 9c54141..4ad8dc0 100644 --- a/IPython/kernel/clusterdir.py +++ b/IPython/kernel/clusterdir.py @@ -15,6 +15,8 @@ The IPython cluster directory # Imports #----------------------------------------------------------------------------- +from __future__ import with_statement + import os import shutil import sys @@ -37,6 +39,10 @@ class ClusterDirError(Exception): pass +class PIDFileError(Exception): + pass + + class ClusterDir(Component): """An object to manage the cluster directory and its resources. @@ -50,9 +56,11 @@ class ClusterDir(Component): security_dir_name = Unicode('security') log_dir_name = Unicode('log') - security_dir = Unicode() - log_dir = Unicode('') - location = Unicode('') + pid_dir_name = Unicode('pid') + security_dir = Unicode(u'') + log_dir = Unicode(u'') + pid_dir = Unicode(u'') + location = Unicode(u'') def __init__(self, location): super(ClusterDir, self).__init__(None) @@ -65,6 +73,7 @@ class ClusterDir(Component): os.chmod(new, 0777) self.security_dir = os.path.join(new, self.security_dir_name) self.log_dir = os.path.join(new, self.log_dir_name) + self.pid_dir = os.path.join(new, self.pid_dir_name) self.check_dirs() def _log_dir_changed(self, name, old, new): @@ -85,9 +94,19 @@ class ClusterDir(Component): else: os.chmod(self.security_dir, 0700) + def _pid_dir_changed(self, name, old, new): + self.check_pid_dir() + + def check_pid_dir(self): + if not os.path.isdir(self.pid_dir): + os.mkdir(self.pid_dir, 0700) + else: + os.chmod(self.pid_dir, 0700) + def check_dirs(self): self.check_security_dir() self.check_log_dir() + self.check_pid_dir() def load_config_file(self, filename): """Load a config file from the top level of the cluster dir. 
@@ -209,13 +228,13 @@ class AppWithClusterDirArgParseConfigLoader(ArgParseConfigLoader): """Default command line options for IPython cluster applications.""" def _add_other_arguments(self): - self.parser.add_argument('-ipythondir', '--ipython-dir', + self.parser.add_argument('--ipython-dir', dest='Global.ipythondir',type=str, help='Set to override default location of Global.ipythondir.', default=NoConfigDefault, metavar='Global.ipythondir' ) - self.parser.add_argument('-p','-profile', '--profile', + self.parser.add_argument('-p', '--profile', dest='Global.profile',type=str, help='The string name of the profile to be used. This determines ' 'the name of the cluster dir as: cluster_. The default profile ' @@ -224,25 +243,25 @@ class AppWithClusterDirArgParseConfigLoader(ArgParseConfigLoader): default=NoConfigDefault, metavar='Global.profile' ) - self.parser.add_argument('-log_level', '--log-level', + self.parser.add_argument('--log-level', dest="Global.log_level",type=int, help='Set the log level (0,10,20,30,40,50). Default is 30.', default=NoConfigDefault, metavar="Global.log_level" ) - self.parser.add_argument('-cluster_dir', '--cluster-dir', + self.parser.add_argument('--cluster-dir', dest='Global.cluster_dir',type=str, help='Set the cluster dir. 
This overrides the logic used by the ' '--profile option.', default=NoConfigDefault, metavar='Global.cluster_dir' ) - self.parser.add_argument('-clean_logs', '--clean-logs', + self.parser.add_argument('--clean-logs', dest='Global.clean_logs', action='store_true', help='Delete old log flies before starting.', default=NoConfigDefault ) - self.parser.add_argument('-noclean_logs', '--no-clean-logs', + self.parser.add_argument('--no-clean-logs', dest='Global.clean_logs', action='store_false', help="Don't Delete old log flies before starting.", default=NoConfigDefault @@ -375,6 +394,8 @@ class ApplicationWithClusterDir(Application): self.security_dir = config.Global.security_dir = sdir ldir = self.cluster_dir_obj.log_dir self.log_dir = config.Global.log_dir = ldir + pdir = self.cluster_dir_obj.pid_dir + self.pid_dir = config.Global.pid_dir = pdir self.log.info("Cluster directory set to: %s" % self.cluster_dir) def start_logging(self): @@ -392,3 +413,46 @@ class ApplicationWithClusterDir(Application): else: open_log_file = sys.stdout log.startLogging(open_log_file) + + def write_pid_file(self): + """Create a .pid file in the pid_dir with my pid. + + This must be called after pre_construct, which sets `self.pid_dir`. + This raises :exc:`PIDFileError` if the pid file exists already. + """ + pid_file = os.path.join(self.pid_dir, self.name + '.pid') + if os.path.isfile(pid_file): + pid = self.get_pid_from_file() + raise PIDFileError( + 'The pid file [%s] already exists. \nThis could mean that this ' + 'server is already running with [pid=%s].' % (pid_file, pid)) + with open(pid_file, 'w') as f: + self.log.info("Creating pid file: %s" % pid_file) + f.write(repr(os.getpid())+'\n') + + def remove_pid_file(self): + """Remove the pid file. + + This should be called at shutdown by registering a callback with + :func:`reactor.addSystemEventTrigger`. 
+ """ + pid_file = os.path.join(self.pid_dir, self.name + '.pid') + if os.path.isfile(pid_file): + try: + self.log.info("Removing pid file: %s" % pid_file) + os.remove(pid_file) + except: + pass + + def get_pid_from_file(self): + """Get the pid from the pid file. + + If the pid file doesn't exist a :exc:`PIDFileError` is raised. + """ + pid_file = os.path.join(self.pid_dir, self.name + '.pid') + if os.path.isfile(pid_file): + with open(pid_file, 'r') as f: + pid = int(f.read().strip()) + return pid + else: + raise PIDFileError('pid file not found: %s' % pid_file) \ No newline at end of file diff --git a/IPython/kernel/ipclusterapp.py b/IPython/kernel/ipclusterapp.py index 6c6360e..08aed1b 100644 --- a/IPython/kernel/ipclusterapp.py +++ b/IPython/kernel/ipclusterapp.py @@ -20,13 +20,15 @@ import os import signal import sys +from twisted.scripts._twistd_unix import daemonize + from IPython.core import release from IPython.external import argparse from IPython.config.loader import ArgParseConfigLoader, NoConfigDefault from IPython.utils.importstring import import_item from IPython.kernel.clusterdir import ( - ApplicationWithClusterDir, ClusterDirError + ApplicationWithClusterDir, ClusterDirError, PIDFileError ) from twisted.internet import reactor, defer @@ -48,12 +50,12 @@ class IPClusterCLLoader(ArgParseConfigLoader): def _add_arguments(self): # This has all the common options that all subcommands use parent_parser1 = argparse.ArgumentParser(add_help=False) - parent_parser1.add_argument('-ipythondir', '--ipython-dir', + parent_parser1.add_argument('--ipython-dir', dest='Global.ipythondir',type=str, help='Set to override default location of Global.ipythondir.', default=NoConfigDefault, metavar='Global.ipythondir') - parent_parser1.add_argument('-log_level', '--log-level', + parent_parser1.add_argument('--log-level', dest="Global.log_level",type=int, help='Set the log level (0,10,20,30,40,50). 
Default is 30.', default=NoConfigDefault, @@ -61,7 +63,7 @@ class IPClusterCLLoader(ArgParseConfigLoader): # This has all the common options that other subcommands use parent_parser2 = argparse.ArgumentParser(add_help=False) - parent_parser2.add_argument('-p','-profile', '--profile', + parent_parser2.add_argument('-p','--profile', dest='Global.profile',type=str, default=NoConfigDefault, help='The string name of the profile to be used. This determines ' @@ -70,7 +72,7 @@ class IPClusterCLLoader(ArgParseConfigLoader): 'if the --cluster-dir option is not used.', default=NoConfigDefault, metavar='Global.profile') - parent_parser2.add_argument('-cluster_dir', '--cluster-dir', + parent_parser2.add_argument('--cluster-dir', dest='Global.cluster_dir',type=str, default=NoConfigDefault, help='Set the cluster dir. This overrides the logic used by the ' @@ -122,16 +124,38 @@ class IPClusterCLLoader(ArgParseConfigLoader): help='The number of engines to start.', metavar='Global.n' ) - parser_start.add_argument('-clean_logs', '--clean-logs', + parser_start.add_argument('--clean-logs', dest='Global.clean_logs', action='store_true', help='Delete old log flies before starting.', default=NoConfigDefault ) - parser_start.add_argument('-noclean_logs', '--no-clean-logs', + parser_start.add_argument('--no-clean-logs', dest='Global.clean_logs', action='store_false', help="Don't delete old log flies before starting.", default=NoConfigDefault ) + parser_start.add_argument('--daemon', + dest='Global.daemonize', action='store_true', + help='Daemonize the ipcluster program. 
+ This implies --log-to-file', + default=NoConfigDefault + ) + parser_start.add_argument('--nodaemon', + dest='Global.daemonize', action='store_false', + help="Don't daemonize the ipcluster program.", + default=NoConfigDefault + ) + + parser_start = subparsers.add_parser( + 'stop', + help='Stop a cluster.', + parents=[parent_parser1, parent_parser2] + ) + parser_start.add_argument('--signal-number', + dest='Global.stop_signal', type=int, + help="The signal number to use in stopping the cluster (default=2).", + metavar="Global.stop_signal", + default=NoConfigDefault + ) default_config_file_name = 'ipcluster_config.py' @@ -153,6 +177,8 @@ class IPClusterApp(ApplicationWithClusterDir): self.default_config.Global.n = 2 self.default_config.Global.reset_config = False self.default_config.Global.clean_logs = True + self.default_config.Global.stop_signal = 2 + self.default_config.Global.daemonize = False def create_command_line_config(self): """Create and return a command line config loader.""" @@ -170,7 +196,7 @@ class IPClusterApp(ApplicationWithClusterDir): elif subcommand=='create': self.auto_create_cluster_dir = True super(IPClusterApp, self).find_resources() - elif subcommand=='start': + elif subcommand=='start' or subcommand=='stop': self.auto_create_cluster_dir = False try: super(IPClusterApp, self).find_resources() @@ -182,6 +208,16 @@ class IPClusterApp(ApplicationWithClusterDir): "information about creating and listing cluster dirs." 
) + def pre_construct(self): + super(IPClusterApp, self).pre_construct() + config = self.master_config + try: + daemon = config.Global.daemonize + if daemon: + config.Global.log_to_file = True + except AttributeError: + pass + def construct(self): config = self.master_config if config.Global.subcommand=='list': @@ -288,11 +324,52 @@ class IPClusterApp(ApplicationWithClusterDir): super(IPClusterApp, self).start_logging() def start_app(self): + """Start the application, depending on what subcommand is used.""" config = self.master_config - if config.Global.subcommand=='create' or config.Global.subcommand=='list': + subcmd = config.Global.subcommand + if subcmd=='create' or subcmd=='list': return - elif config.Global.subcommand=='start': + elif subcmd=='start': + # First see if the cluster is already running + try: + pid = self.get_pid_from_file() + except: + pass + else: + self.log.critical( + 'Cluster is already running with [pid=%s]. ' + 'use "ipcluster stop" to stop the cluster.' % pid + ) + # Here I exit with an unusual exit status that other processes + # can watch for to learn how I exited. + sys.exit(10) + # Now log and daemonize + self.log.info('Starting ipcluster with [daemon=%r]' % config.Global.daemonize) + if config.Global.daemonize: + if os.name=='posix': + os.chdir(config.Global.cluster_dir) + self.log_level = 40 + daemonize() + + # Now write the new pid file after our new forked pid is active. + self.write_pid_file() + reactor.addSystemEventTrigger('during','shutdown', self.remove_pid_file) reactor.run() + elif subcmd=='stop': + try: + pid = self.get_pid_from_file() + except PIDFileError: + self.log.critical( + 'Problem reading pid file, cluster is probably not running.' + ) + # Here I exit with an unusual exit status that other processes + # can watch for to learn how I exited. 
+ sys.exit(11) + sig = config.Global.stop_signal + self.log.info( + "Stopping cluster [pid=%r] with [signal=%r]" % (pid, sig) + ) + os.kill(pid, sig) def launch_new_instance(): diff --git a/IPython/kernel/ipcontrollerapp.py b/IPython/kernel/ipcontrollerapp.py index 4ac0954..ca3750f 100644 --- a/IPython/kernel/ipcontrollerapp.py +++ b/IPython/kernel/ipcontrollerapp.py @@ -15,6 +15,8 @@ The IPython controller application. # Imports #----------------------------------------------------------------------------- +from __future__ import with_statement + import copy import os import sys @@ -158,9 +160,13 @@ cl_args = ( 'are deleted before the controller starts. This must be set if ' 'specific ports are specified by --engine-port or --client-port.') ), - (('-ns','--no-security'), dict( + (('--no-secure',), dict( action='store_false', dest='Global.secure', default=NoConfigDefault, help='Turn off SSL encryption for all connections.') + ), + (('--secure',), dict( + action='store_true', dest='Global.secure', default=NoConfigDefault, + help='Turn on SSL encryption for all connections.') ) ) @@ -213,7 +219,7 @@ class IPControllerApp(ApplicationWithClusterDir): self.start_logging() self.import_statements() - + # Create the service hierarchy self.main_service = service.MultiService() # The controller service @@ -240,6 +246,8 @@ class IPControllerApp(ApplicationWithClusterDir): def start_app(self): # Start the controller service and set things running self.main_service.startService() + self.write_pid_file() + reactor.addSystemEventTrigger('during','shutdown', self.remove_pid_file) reactor.run() diff --git a/docs/examples/kernel/fractal.py b/docs/examples/kernel/fractal.py new file mode 100644 index 0000000..715d30a --- /dev/null +++ b/docs/examples/kernel/fractal.py @@ -0,0 +1,90 @@ +from numpy import * + +def mandel(n, m, itermax, xmin, xmax, ymin, ymax): + ''' + Fast mandelbrot computation using numpy. 
+ + (n, m) are the output image dimensions + itermax is the maximum number of iterations to do + xmin, xmax, ymin, ymax specify the region of the + set to compute. + ''' + # The point of ix and iy is that they are 2D arrays + # giving the x-coord and y-coord at each point in + # the array. The reason for doing this will become + # clear below... + ix, iy = mgrid[0:n, 0:m] + # Now x and y are the x-values and y-values at each + # point in the array, linspace(start, end, n) + # is an array of n linearly spaced points between + # start and end, and we then index this array using + # numpy fancy indexing. If A is an array and I is + # an array of indices, then A[I] has the same shape + # as I and at each place i in I has the value A[i]. + x = linspace(xmin, xmax, n)[ix] + y = linspace(ymin, ymax, m)[iy] + # c is the complex number with the given x, y coords + c = x+complex(0,1)*y + del x, y # save a bit of memory, we only need z + # the output image coloured according to the number + # of iterations it takes to get to the boundary + # abs(z)>2 + img = zeros(c.shape, dtype=int) + # Here is where the improvement over the standard + # algorithm for drawing fractals in numpy comes in. + # We flatten all the arrays ix, iy and c. This + # flattening doesn't use any more memory because + # we are just changing the shape of the array, the + # data in memory stays the same. It also affects + # each array in the same way, so that index i in + # array c has x, y coords ix[i], iy[i]. The way the + # algorithm works is that whenever abs(z)>2 we + # remove the corresponding index from each of the + # arrays ix, iy and c. Since we do the same thing + # to each array, the correspondence between c and + # the x, y coords stored in ix and iy is kept. + ix.shape = n*m + iy.shape = n*m + c.shape = n*m + # we iterate z->z^2+c with z starting at 0, but the + # first iteration makes z=c so we just start there. + # We need to copy c because otherwise the operation + # z->z^2 will send c->c^2. 
+ z = copy(c) + for i in xrange(itermax): + if not len(z): break # all points have escaped + # equivalent to z = z*z+c but quicker and uses + # less memory + multiply(z, z, z) + add(z, c, z) + # these are the points that have escaped + rem = abs(z)>2.0 + # colour them with the iteration number, we + # add one so that points which haven't + # escaped have 0 as their iteration number, + # this is why we keep the arrays ix and iy + # because we need to know which point in img + # to colour + img[ix[rem], iy[rem]] = i+1 + # -rem is the array of points which haven't + # escaped, in numpy -A for a boolean array A + # is the NOT operation. + rem = -rem + # So we select out the points in + # z, ix, iy and c which are still to be + # iterated on in the next step + z = z[rem] + ix, iy = ix[rem], iy[rem] + c = c[rem] + return img + +if __name__=='__main__': + from pylab import * + import time + start = time.time() + I = mandel(400, 400, 100, -2, .5, -1.25, 1.25) + print 'Time taken:', time.time()-start + I[I==0] = 101 + img = imshow(I.T, origin='lower left') + img.write_png('mandel.png', noscale=True) + show() diff --git a/docs/examples/kernel/wordfreq.py b/docs/examples/kernel/wordfreq.py index e76f39f..e12549f 100644 --- a/docs/examples/kernel/wordfreq.py +++ b/docs/examples/kernel/wordfreq.py @@ -1,13 +1,20 @@ """Count the frequencies of words in a string""" +from __future__ import division + +import cmath as math + + def wordfreq(text): """Return a dictionary of words and word counts in a string.""" freqs = {} for word in text.split(): - freqs[word] = freqs.get(word, 0) + 1 + lword = word.lower() + freqs[lword] = freqs.get(lword, 0) + 1 return freqs + def print_wordfreq(freqs, n=10): """Print the n most common words and counts in the freqs dict.""" @@ -17,7 +24,43 @@ def print_wordfreq(freqs, n=10): for (count, word) in items[:n]: print word, count -if __name__ == '__main__': - import gzip - text = gzip.open('HISTORY.gz').read() - freqs = wordfreq(text) \ No newline at 
end of file + +def wordfreq_to_weightsize(worddict, minsize=10, maxsize=50, minalpha=0.4, maxalpha=1.0): + mincount = min(worddict.itervalues()) + maxcount = max(worddict.itervalues()) + weights = {} + for k, v in worddict.iteritems(): + w = (v-mincount)/(maxcount-mincount) + alpha = minalpha + (maxalpha-minalpha)*w + size = minsize + (maxsize-minsize)*w + weights[k] = (alpha, size) + return weights + + +def tagcloud(worddict, n=10, minsize=10, maxsize=50, minalpha=0.4, maxalpha=1.0): + from matplotlib import pyplot as plt + import random + + worddict = wordfreq_to_weightsize(worddict, minsize, maxsize, minalpha, maxalpha) + + fig = plt.figure() + ax = fig.add_subplot(111) + ax.set_position([0.0,0.0,1.0,1.0]) + plt.xticks([]) + plt.yticks([]) + + words = worddict.keys() + alphas = [v[0] for v in worddict.values()] + sizes = [v[1] for v in worddict.values()] + items = zip(alphas, sizes, words) + items.sort(reverse=True) + for alpha, size, word in items[:n]: + xpos = random.normalvariate(0.5, 0.3) + ypos = random.normalvariate(0.5, 0.3) + # xpos = random.uniform(0.0,1.0) + # ypos = random.uniform(0.0,1.0) + ax.text(xpos, ypos, word.lower(), alpha=alpha, fontsize=size) + ax.autoscale_view() + return ax + + \ No newline at end of file