# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''remote largefile store; the base class for wirestore'''
from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import (
    error,
    pycompat,
    util,
)
from mercurial.utils import stringutil

from . import (
    basestore,
    lfutil,
    localstore,
)

urlerr = util.urlerr
urlreq = util.urlreq


class remotestore(basestore.basestore):
    '''a largefile store accessed over a network'''

    def __init__(self, ui, repo, url):
        super(remotestore, self).__init__(ui, repo, url)
        self._lstore = None
        if repo is not None:
            self._lstore = localstore.localstore(self.ui, self.repo, self.repo)

    def put(self, source, hash):
        if self.sendfile(source, hash):
            raise error.Abort(
                _(b'remotestore: could not put %s to remote store %s')
                % (source, util.hidepassword(self.url))
            )
        self.ui.debug(
            _(b'remotestore: put %s to remote store %s\n')
            % (source, util.hidepassword(self.url))
        )

    def exists(self, hashes):
        return {
            h: s == 0
            for (h, s) in pycompat.iteritems(
                self._stat(hashes)
            )  # dict-from-generator
        }

    def sendfile(self, filename, hash):
        self.ui.debug(b'remotestore: sendfile(%s, %s)\n' % (filename, hash))
        try:
            with lfutil.httpsendfile(self.ui, filename) as fd:
                return self._put(hash, fd)
        except IOError as e:
            raise error.Abort(
                _(b'remotestore: could not open file %s: %s')
                % (filename, stringutil.forcebytestr(e))
            )

    def _getfile(self, tmpfile, filename, hash):
        try:
            chunks = self._get(hash)
        except urlerr.httperror as e:
            # 401s get converted to error.Aborts; everything else is fine being
            # turned into a StoreError
            raise basestore.StoreError(
                filename, hash, self.url, stringutil.forcebytestr(e)
            )
        except urlerr.urlerror as e:
            # This usually indicates a connection problem, so don't
            # keep trying with the other files... they will probably
            # all fail too.
            raise error.Abort(
                b'%s: %s' % (util.hidepassword(self.url), e.reason)
            )
        except IOError as e:
            raise basestore.StoreError(
                filename, hash, self.url, stringutil.forcebytestr(e)
            )

        return lfutil.copyandhash(chunks, tmpfile)

    def _hashesavailablelocally(self, hashes):
        existslocallymap = self._lstore.exists(hashes)
        localhashes = [hash for hash in hashes if existslocallymap[hash]]
        return localhashes

    def _verifyfiles(self, contents, filestocheck):
        failed = False
        expectedhashes = [
            expectedhash for cset, filename, expectedhash in filestocheck
        ]
        localhashes = self._hashesavailablelocally(expectedhashes)
        stats = self._stat(
            [
                expectedhash
                for expectedhash in expectedhashes
                if expectedhash not in localhashes
            ]
        )

        for cset, filename, expectedhash in filestocheck:
            if expectedhash in localhashes:
                filetocheck = (cset, filename, expectedhash)
                verifyresult = self._lstore._verifyfiles(
                    contents, [filetocheck]
                )
                if verifyresult:
                    failed = True
            else:
                stat = stats[expectedhash]
                if stat:
                    if stat == 1:
                        self.ui.warn(
                            _(b'changeset %s: %s: contents differ\n')
                            % (cset, filename)
                        )
                        failed = True
                    elif stat == 2:
                        self.ui.warn(
                            _(b'changeset %s: %s missing\n') % (cset, filename)
                        )
                        failed = True
                    else:
                        raise RuntimeError(
                            b'verify failed: unexpected response '
                            b'from statlfile (%r)' % stat
                        )

        return failed

    def _put(self, hash, fd):
        '''Put file with the given hash in the remote store.'''
        raise NotImplementedError(b'abstract method')

    def _get(self, hash):
        '''Get an iterator for the content with the given hash.'''
        raise NotImplementedError(b'abstract method')

    def _stat(self, hashes):
        '''Get information about the availability of the files specified by
        hashes in the remote store. Return a dictionary mapping each hash
        to a status code: 0 means the file is available; any other value
        means it is not (see _verifyfiles() above, which treats 1 as
        "contents differ" and 2 as "missing").'''
        raise NotImplementedError(b'abstract method')
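

# Illustrative sketch, not part of the original module: a minimal in-memory
# subclass showing how the three abstract hooks above fit together. The class
# name `memorystore` and its dict-backed storage are hypothetical; Mercurial's
# real network-backed subclass is wirestore. The status codes returned by
# _stat() mirror what _verifyfiles() expects: 0 (available), 1 (contents
# differ), 2 (missing).
import hashlib  # largefile hashes are hex sha1 digests of the content


class memorystore(remotestore):
    '''hypothetical largefile store backed by an in-memory dict'''

    def __init__(self, ui, repo, url):
        super(memorystore, self).__init__(ui, repo, url)
        self._files = {}  # maps hex sha1 hash -> content bytes

    def _put(self, hash, fd):
        # a falsy return value signals success to sendfile()/put()
        self._files[hash] = fd.read()
        return 0

    def _get(self, hash):
        # raising IOError here lets _getfile() convert the failure into a
        # basestore.StoreError; content is consumed as an iterator of chunks
        if hash not in self._files:
            raise IOError(2, b'unknown largefile %s' % hash)
        return iter([self._files[hash]])

    def _stat(self, hashes):
        stats = {}
        for hash in hashes:
            content = self._files.get(hash)
            if content is None:
                stats[hash] = 2  # missing
            elif hashlib.sha1(content).hexdigest().encode('ascii') != hash:
                stats[hash] = 1  # contents differ
            else:
                stats[hash] = 0  # available
        return stats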