worker: use os._exit for posix worker in all cases

Like commandserver, the worker should never run other resource cleanup logic. Previously this was not true for workers that hit exceptions other than KeyboardInterrupt. This actually caused a real-world deadlock with remotefilelog:

1. remotefilelog/fileserverclient creates an sshpeer; its pipei/o/e get created.
2. The worker inherits that sshpeer's pipei/o/e.
3. The worker runs sshpeer.cleanup (this only happens without os._exit).
4. The worker closes pipeo/i, which would normally make the ssh process read EOF from its stdin and exit. But the master process still has pipeo open, so no EOF arrives.
5. The worker reads pipee (stderr of sshpeer), which never completes because the ssh process does not exit and never closes its stderr.
6. The master waits for all workers, which never completes because they never finish sshpeer.cleanup.

This could also be addressed by closing these fds after fork, which is not easy because Python 2.x has no official "afterfork" hook, and hacking os.fork is ugly. Besides, sshpeer is probably not the only troublemaker. The patch changes _posixworker so all its code paths use os._exit, avoiding unwanted resource cleanups.
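To make the fix concrete, the sketch below shows the fork/exit pattern the patch enforces in _posixworker. It is a minimal illustration under assumed names, not the actual mercurial/worker.py code: runworker and workfn are hypothetical, and the exit codes are illustrative. The point is that every code path in the forked child ends in os._exit(), so cleanup logic inherited from the parent (such as sshpeer.cleanup triggered by destructors or atexit handlers) never runs in the child.

# Minimal sketch of the "always os._exit in the child" pattern
# (hypothetical names; not the real mercurial/worker.py implementation).
from __future__ import absolute_import

import os
import sys
import traceback

def runworker(workfn):
    """Fork a child that runs workfn() and never returns normally."""
    pid = os.fork()
    if pid > 0:
        return pid          # parent: wait on the pid elsewhere
    # Child: every path below must end in os._exit().  A plain return or
    # sys.exit() would run exit handlers and destructors for resources the
    # child merely inherited from the parent (e.g. an sshpeer's pipes).
    try:
        workfn()
        os._exit(0)
    except KeyboardInterrupt:
        os._exit(255)
    except BaseException:
        try:
            traceback.print_exc(file=sys.stderr)
        finally:
            os._exit(255)

With this shape a failing workfn() still reports its traceback, but the child dies immediately afterwards instead of unwinding cleanup code that belongs to the master process.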

File last commit:

r30180:736f92c4 default
r30521:86cd09bc default
localstore.py
68 lines | 2.4 KiB | text/x-python
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''store class for local filesystem'''
from __future__ import absolute_import

from mercurial.i18n import _
from mercurial import util

from . import (
    basestore,
    lfutil,
)

class localstore(basestore.basestore):
    '''localstore first attempts to grab files out of the store in the remote
    Mercurial repository. Failing that, it attempts to grab the files from
    the user cache.'''

    def __init__(self, ui, repo, remote):
        self.remote = remote.local()
        super(localstore, self).__init__(ui, repo, self.remote.url())

    def put(self, source, hash):
        if lfutil.instore(self.remote, hash):
            return
        lfutil.link(source, lfutil.storepath(self.remote, hash))

    def exists(self, hashes):
        retval = {}
        for hash in hashes:
            retval[hash] = lfutil.instore(self.remote, hash)
        return retval

    def _getfile(self, tmpfile, filename, hash):
        path = lfutil.findfile(self.remote, hash)
        if not path:
            raise basestore.StoreError(filename, hash, self.url,
                                       _("can't get file locally"))
        with open(path, 'rb') as fd:
            return lfutil.copyandhash(
                util.filechunkiter(fd), tmpfile)

    def _verifyfiles(self, contents, filestocheck):
        failed = False
        for cset, filename, expectedhash in filestocheck:
            storepath, exists = lfutil.findstorepath(self.repo, expectedhash)
            if not exists:
                storepath, exists = lfutil.findstorepath(
                    self.remote, expectedhash)
            if not exists:
                self.ui.warn(
                    _('changeset %s: %s references missing %s\n')
                    % (cset, filename, storepath))
                failed = True
            elif contents:
                actualhash = lfutil.hashfile(storepath)
                if actualhash != expectedhash:
                    self.ui.warn(
                        _('changeset %s: %s references corrupted %s\n')
                        % (cset, filename, storepath))
                    failed = True
        return failed