# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''base class for store implementations and store-related utility code'''
from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import node, util

from . import lfutil

class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''
    def __init__(self, filename, hash, url, detail):
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        return (_("error getting id %s from url %s for file %s: %s\n") %
                (self.hash, util.hidepassword(self.url), self.filename,
                 self.detail))

    def __str__(self):
        return "%s: %s" % (util.hidepassword(self.url), self.detail)

class basestore(object):
    def __init__(self, ui, repo, url):
        self.ui = ui
        self.repo = repo
        self.url = url

    def put(self, source, hash):
        '''Put source file into the store so it can be retrieved by hash.'''
        raise NotImplementedError('abstract method')

    def exists(self, hashes):
        '''Check to see if the store contains the given hashes. Given an
        iterable of hashes it returns a mapping from hash to bool.'''
        raise NotImplementedError('abstract method')
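        # Illustration only (h1 and h2 are hypothetical sha1 hex strings):
        # if only h1 is present in the store, an implementation of
        #     store.exists(set([h1, h2]))
        # would return {h1: True, h2: False}.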
    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root. files is a list of (filename, hash)
        tuples. Return (success, missing), lists of files successfully
        downloaded and those not found in the store. success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get. (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        available = self.exists(set(hash for (_filename, hash) in files))
        progress = ui.makeprogress(_('getting largefiles'), unit=_('files'),
                                   total=len(files))
        for filename, hash in files:
            progress.update(at)
            at += 1
            ui.note(_('getting %s:%s\n') % (filename, hash))

            if not available.get(hash):
                ui.warn(_('%s: largefile %s not available from %s\n')
                        % (filename, hash, util.hidepassword(self.url)))
                missing.append(filename)
                continue

            if self._gethash(filename, hash):
                success.append((filename, hash))
            else:
                missing.append(filename)

        progress.complete()
        return (success, missing)
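
    # Illustration only (the filename and hash below are hypothetical):
    # a caller might run
    #     success, missing = store.get([('big.bin', somehash)])
    # and find ('big.bin', somehash) in success, or 'big.bin' in missing
    # if the hash was not available in the store.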
    def _gethash(self, filename, hash):
        """Get file with the provided hash and store it in the local repo's
        store and in the usercache.
        filename is for informational messages only.
        """
        util.makedirs(lfutil.storepath(self.repo, ''))
        storefilename = lfutil.storepath(self.repo, hash)

        tmpname = storefilename + '.tmp'
        with util.atomictempfile(tmpname,
                                 createmode=self.repo.store.createmode) as tmpfile:
            try:
                gothash = self._getfile(tmpfile, filename, hash)
            except StoreError as err:
                self.ui.warn(err.longmessage())
                gothash = ""

        if gothash != hash:
            if gothash != "":
                self.ui.warn(_('%s: data corruption (expected %s, got %s)\n')
                             % (filename, hash, gothash))
            util.unlink(tmpname)
            return False

        util.rename(tmpname, storefilename)
        lfutil.linktousercache(self.repo, hash)
        return True

    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''
        self.ui.status(_('searching %d changesets for largefiles\n') %
                       len(revs))
        verified = set()    # set of (filename, filenode) tuples
        filestocheck = []   # list of (cset, filename, expectedhash)
        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                filename = lfutil.splitstandin(standin)
                if filename:
                    fctx = cctx[standin]
                    key = (filename, fctx.filenode())
                    if key not in verified:
                        verified.add(key)
                        expectedhash = lfutil.readasstandin(fctx)
                        filestocheck.append((cset, filename, expectedhash))

        failed = self._verifyfiles(contents, filestocheck)

        numrevs = len(verified)
        numlfiles = len(set([fname for (fname, fnode) in verified]))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        return int(failed)

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch one revision of one file from the store and write it
        to tmpfile. Compute the hash of the file on-the-fly as it
        downloads and return the hash. Close tmpfile. Raise
        StoreError if unable to download the file (e.g. it does not
        exist in the store).'''
        raise NotImplementedError('abstract method')

    def _verifyfiles(self, contents, filestocheck):
        '''Perform the actual verification of files in the store.
        'contents' controls verification of content hash.
        'filestocheck' is a list of files to check.
        Returns _true_ if any problems are found!
        '''
        raise NotImplementedError('abstract method')
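
# What follows is an illustrative sketch, not part of the upstream module:
# a minimal concrete store that fills in the abstract methods above by
# serving largefiles from a plain local directory with one file per hash.
# The class name, the directory layout, and the use of hashlib/os/shutil
# here are assumptions made for the example; the real implementations live
# in localstore.py and remotestore.py.

import hashlib
import os
import shutil

class exampledirstore(basestore):
    '''Toy store backed by a directory containing one file per hash.'''

    def _storepath(self, hash):
        # self.url is treated as a local directory path in this sketch.
        return os.path.join(self.url, hash)

    def put(self, source, hash):
        util.makedirs(self.url)
        shutil.copyfile(source, self._storepath(hash))

    def exists(self, hashes):
        return dict((h, os.path.exists(self._storepath(h))) for h in hashes)

    def _getfile(self, tmpfile, filename, hash):
        path = self._storepath(hash)
        if not os.path.exists(path):
            raise StoreError(filename, hash, self.url, 'not in store')
        # Hash the content on the fly while copying it into tmpfile, as
        # the _getfile() contract above asks (tmpfile itself is closed by
        # the atomictempfile context manager in _gethash()).
        sha = hashlib.sha1()
        with open(path, 'rb') as fp:
            for chunk in iter(lambda: fp.read(131072), ''):
                sha.update(chunk)
                tmpfile.write(chunk)
        return sha.hexdigest()

    def _verifyfiles(self, contents, filestocheck):
        failed = False
        for cset, filename, expectedhash in filestocheck:
            path = self._storepath(expectedhash)
            if not os.path.exists(path):
                self.ui.warn(_('changeset %s: %s missing\n')
                             % (cset, filename))
                failed = True
            elif contents:
                with open(path, 'rb') as fp:
                    if hashlib.sha1(fp.read()).hexdigest() != expectedhash:
                        self.ui.warn(_('changeset %s: %s: contents differ\n')
                                     % (cset, filename))
                        failed = True
        return failed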