# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''base class for store implementations and store-related utility code'''

from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import node, util

from . import lfutil
class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''

    def __init__(self, filename, hash, url, detail):
        # filename: largefile name, for messages only
        # hash: expected content hash of the largefile
        # url: store URL (may contain credentials; hidden when displayed)
        # detail: human-readable description of the underlying failure
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        '''Return a full, translated error message ending in a newline.'''
        # hidepassword() keeps credentials embedded in the URL out of the
        # user-visible output.
        return (_("error getting id %s from url %s for file %s: %s\n") %
                (self.hash, util.hidepassword(self.url), self.filename,
                 self.detail))

    def __str__(self):
        return "%s: %s" % (util.hidepassword(self.url), self.detail)
class basestore(object):
    '''Base class for largefile stores.

    Concrete subclasses must implement put(), exists(), _getfile() and
    _verifyfiles(); the shared download/verify driver logic lives here.
    '''

    def __init__(self, ui, repo, url):
        self.ui = ui
        self.repo = repo
        self.url = url

    def put(self, source, hash):
        '''Put source file into the store so it can be retrieved by hash.'''
        raise NotImplementedError('abstract method')

    def exists(self, hashes):
        '''Check to see if the store contains the given hashes. Given an
        iterable of hashes it returns a mapping from hash to bool.'''
        raise NotImplementedError('abstract method')

    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root. files is a list of (filename, hash)
        tuples. Return (success, missing), lists of files successfully
        downloaded and those not found in the store. success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get. (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        # Stat all requested hashes in one batch up front so each per-file
        # iteration below only needs a dictionary lookup.
        available = self.exists(set(hash for (_filename, hash) in files))
        with ui.makeprogress(_('getting largefiles'), unit=_('files'),
                             total=len(files)) as progress:
            for filename, hash in files:
                progress.update(at)
                at += 1
                ui.note(_('getting %s:%s\n') % (filename, hash))

                if not available.get(hash):
                    ui.warn(_('%s: largefile %s not available from %s\n')
                            % (filename, hash, util.hidepassword(self.url)))
                    missing.append(filename)
                    continue

                if self._gethash(filename, hash):
                    success.append((filename, hash))
                else:
                    missing.append(filename)

        return (success, missing)

    def _gethash(self, filename, hash):
        """Get file with the provided hash and store it in the local repo's
        store and in the usercache.
        filename is for informational messages only.
        Returns True on success, False (after warning the user) otherwise.
        """
        util.makedirs(lfutil.storepath(self.repo, ''))
        storefilename = lfutil.storepath(self.repo, hash)

        # Download into a temp file and only rename it into place once the
        # content hash has been confirmed, so a partial or corrupt download
        # never ends up in the store.
        tmpname = storefilename + '.tmp'
        with util.atomictempfile(tmpname,
                                 createmode=self.repo.store.createmode) \
                as tmpfile:
            try:
                gothash = self._getfile(tmpfile, filename, hash)
            except StoreError as err:
                self.ui.warn(err.longmessage())
                gothash = ""

        if gothash != hash:
            # An empty gothash means the download itself failed and the
            # warning was already issued above; only report corruption when
            # we actually received data with the wrong hash.
            if gothash != "":
                self.ui.warn(_('%s: data corruption (expected %s, got %s)\n')
                             % (filename, hash, gothash))
            util.unlink(tmpname)
            return False

        util.rename(tmpname, storefilename)
        lfutil.linktousercache(self.repo, hash)
        return True

    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''

        self.ui.status(_('searching %d changesets for largefiles\n') %
                       len(revs))
        verified = set()                # set of (filename, filenode) tuples
        filestocheck = []               # list of (cset, filename, expectedhash)
        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                filename = lfutil.splitstandin(standin)
                if filename:
                    fctx = cctx[standin]
                    key = (filename, fctx.filenode())
                    # Each (filename, filenode) pair is only checked once,
                    # even if it appears in many changesets.
                    if key not in verified:
                        verified.add(key)
                        expectedhash = lfutil.readasstandin(fctx)
                        filestocheck.append((cset, filename, expectedhash))

        failed = self._verifyfiles(contents, filestocheck)

        numrevs = len(verified)
        numlfiles = len({fname for (fname, fnode) in verified})
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        return int(failed)

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch one revision of one file from the store and write it
        to tmpfile. Compute the hash of the file on-the-fly as it
        downloads and return the hash. Close tmpfile. Raise
        StoreError if unable to download the file (e.g. it does not
        exist in the store).'''
        raise NotImplementedError('abstract method')

    def _verifyfiles(self, contents, filestocheck):
        '''Perform the actual verification of files in the store.
        'contents' controls verification of content hash.
        'filestocheck' is list of files to check.
        Returns _true_ if any problems are found!
        '''
        raise NotImplementedError('abstract method')