##// END OF EJS Templates
worker: use os._exit for posix worker in all cases...
worker: use os._exit for posix worker in all cases Like commandserver, the worker should never run other resource cleanup logic. Previously this was not true for workers if they had exceptions other than KeyboardInterrupt. This actually caused a real-world deadlock with remotefilelog: 1. remotefilelog/fileserverclient creates a sshpeer. pipei/o/e get created. 2. worker inherits that sshpeer's pipei/o/e. 3. worker runs sshpeer.cleanup (only happens without os._exit) 4. worker closes pipeo/i, which would normally make the sshpeer read EOF from its stdin and exit. But the master process still has pipeo, so no EOF. 5. worker reads pipee (stderr of sshpeer), which never completes because the ssh process does not exit and does not close its stderr. 6. master waits for all workers, which never completes because they never complete sshpeer.cleanup. This could also be addressed by closing these fds after fork, which is not easy because Python 2.x does not have an official "afterfork" hook. Hacking os.fork is also ugly. Besides, sshpeer is probably not the only troublemaker. The patch changes _posixworker so all its code paths will use os._exit to avoid running unwanted resource clean-ups.

File last commit:

r30142:3dcaf1c4 default
r30521:86cd09bc default
Show More
basestore.py
164 lines | 6.3 KiB | text/x-python | PythonLexer
various
hgext: add largefiles extension...
r15168 # Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
Greg Ward
largefiles: improve comments, internal docstrings...
r15252 '''base class for store implementations and store-related utility code'''
liscju
py3: make largefiles/basestore.py use absolute_import
r29307 from __future__ import absolute_import
various
hgext: add largefiles extension...
r15168
from mercurial.i18n import _
liscju
py3: make largefiles/basestore.py use absolute_import
r29307 from mercurial import node, util
from . import lfutil
various
hgext: add largefiles extension...
r15168
class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''

    def __init__(self, filename, hash, url, detail):
        # Record enough context to build a useful diagnostic later.
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        # Full, translated message for the user. Any credentials embedded
        # in the store URL are masked before display.
        safeurl = util.hidepassword(self.url)
        fmt = _("error getting id %s from url %s for file %s: %s\n")
        return fmt % (self.hash, safeurl, self.filename, self.detail)

    def __str__(self):
        # Short untranslated form, also with credentials masked.
        return "%s: %s" % (util.hidepassword(self.url), self.detail)
various
hgext: add largefiles extension...
r15168
class basestore(object):
    '''Abstract base class for largefile stores.

    Concrete subclasses must implement put(), exists(), _getfile() and
    _verifyfiles(); this class provides the shared download (get/_gethash)
    and verify driver logic on top of those primitives.
    '''

    def __init__(self, ui, repo, url):
        # ui: for progress/status/warning output
        # repo: local repository whose store/usercache receive files
        # url: location of the remote store (may contain credentials;
        #      always shown via util.hidepassword)
        self.ui = ui
        self.repo = repo
        self.url = url

    def put(self, source, hash):
        '''Put source file into the store so it can be retrieved by hash.'''
        raise NotImplementedError('abstract method')

    def exists(self, hashes):
        '''Check to see if the store contains the given hashes. Given an
        iterable of hashes it returns a mapping from hash to bool.'''
        raise NotImplementedError('abstract method')

    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root.  files is a list of (filename, hash)
        tuples.  Return (success, missing), lists of files successfully
        downloaded and those not found in the store.  success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get.  (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        # Stat all requested hashes in one batch up front, so availability
        # is known before any download starts (one remote round trip
        # instead of one per file).
        available = self.exists(set(hash for (_filename, hash) in files))
        for filename, hash in files:
            ui.progress(_('getting largefiles'), at, unit=_('files'),
                        total=len(files))
            at += 1
            ui.note(_('getting %s:%s\n') % (filename, hash))

            # Known-missing hashes are reported and skipped without
            # attempting a download.
            if not available.get(hash):
                ui.warn(_('%s: largefile %s not available from %s\n')
                        % (filename, hash, util.hidepassword(self.url)))
                missing.append(filename)
                continue

            if self._gethash(filename, hash):
                success.append((filename, hash))
            else:
                missing.append(filename)

        # A None position closes the progress topic.
        ui.progress(_('getting largefiles'), None)
        return (success, missing)

    def _gethash(self, filename, hash):
        """Get file with the provided hash and store it in the local repo's
        store and in the usercache.
        filename is for informational messages only.
        Returns True on success, False on download failure or hash
        mismatch (a warning has already been issued in those cases).
        """
        util.makedirs(lfutil.storepath(self.repo, ''))
        storefilename = lfutil.storepath(self.repo, hash)

        # Download into a temporary file first so a failed or corrupt
        # transfer never leaves a bad file under the final store name.
        tmpname = storefilename + '.tmp'
        with util.atomictempfile(tmpname,
                createmode=self.repo.store.createmode) as tmpfile:
            try:
                gothash = self._getfile(tmpfile, filename, hash)
            except StoreError as err:
                # Download failed; empty gothash forces the mismatch
                # path below without a corruption warning.
                self.ui.warn(err.longmessage())
                gothash = ""

        if gothash != hash:
            if gothash != "":
                self.ui.warn(_('%s: data corruption (expected %s, got %s)\n')
                             % (filename, hash, gothash))
            util.unlink(tmpname)
            return False

        # Content verified: move into place and hardlink/copy into the
        # per-user cache.
        util.rename(tmpname, storefilename)
        lfutil.linktousercache(self.repo, hash)
        return True

    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''

        self.ui.status(_('searching %d changesets for largefiles\n') %
                       len(revs))
        verified = set()                # set of (filename, filenode) tuples
        filestocheck = []               # list of (cset, filename, expectedhash)
        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                filename = lfutil.splitstandin(standin)
                # splitstandin returns a falsy value for non-standin
                # paths, which are skipped.
                if filename:
                    fctx = cctx[standin]
                    key = (filename, fctx.filenode())
                    # Deduplicate: each (filename, filenode) pair is
                    # checked at most once across all revs.
                    if key not in verified:
                        verified.add(key)
                        # First 40 bytes of the standin contents —
                        # presumably the hex hash of the largefile;
                        # TODO confirm against lfutil.
                        expectedhash = fctx.data()[0:40]
                        filestocheck.append((cset, filename, expectedhash))

        # Subclass performs the actual store checks in one batch.
        failed = self._verifyfiles(contents, filestocheck)

        numrevs = len(verified)
        numlfiles = len(set([fname for (fname, fnode) in verified]))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        return int(failed)

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch one revision of one file from the store and write it
        to tmpfile.  Compute the hash of the file on-the-fly as it
        downloads and return the hash.  Close tmpfile.  Raise
        StoreError if unable to download the file (e.g. it does not
        exist in the store).'''
        raise NotImplementedError('abstract method')

    def _verifyfiles(self, contents, filestocheck):
        '''Perform the actual verification of files in the store.
        'contents' controls verification of content hash.
        'filestocheck' is list of files to check.
        Returns _true_ if any problems are found!
        '''
        raise NotImplementedError('abstract method')