# treediscovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections

from .i18n import _
from .node import (
    nullid,
    short,
)
from . import (
    error,
    pycompat,
)
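

# findcommonincoming() implements the old tree-walking discovery protocol:
# the client repeatedly asks the remote about linear "branch" segments
# (head, root, first parent, second parent) via the 'branches' command and
# then binary-searches each partially-known segment with the 'between'
# command to locate the boundary between shared and missing history.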
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, fetch, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "fetch" is a list of roots of the nodes that would be incoming, to be
      supplied to changegroupsubset.
    "heads" is either the supplied heads, or else the remote's heads.
    """
    knownnode = repo.changelog.hasnode
    search = []
    fetch = set()
    seen = set()
    seenbranch = set()
    base = set()

    if not heads:
        with remote.commandexecutor() as e:
            heads = e.callcommand('heads', {}).result()
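
    # An empty local repository: everything the remote has is incoming, so
    # answer without any further queries.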
    if repo.changelog.tip() == nullid:
        base.add(nullid)
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], heads

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if not knownnode(h):
            unknown.append(h)
        else:
            base.add(h)

    if not unknown:
        return list(base), [], list(heads)
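
    # 'req' remembers every node already queried on the remote so the same
    # node is never asked about twice; 'reqcnt' counts the round trips for
    # the progress and debug output.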
    req = set(unknown)
    reqcnt = 0
    progress = repo.ui.makeprogress(_('searching'), unit=_('queries'))

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    with remote.commandexecutor() as e:
        branches = e.callcommand('branches', {'nodes': unknown}).result()

    unknown = collections.deque(branches)
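
    # Breadth-first walk over the remote's branch segments: the inner loop
    # classifies every queued segment and collects still-unknown parents in
    # 'r'; the outer loop then asks the remote about those parents with
    # further 'branches' requests.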
    while unknown:
        r = []
        while unknown:
            n = unknown.popleft()
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and knownnode(n[1]): # do we know the base?
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if knownnode(n[2]) and knownnode(n[3]):
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if knownnode(p):
                            base.add(p) # latest known

                for p in n[2:4]:
                    if p not in req and not knownnode(p):
                        r.append(p)
                        req.add(p)
            seen.add(n[0])
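
        # Query the remote about the unknown parents gathered above, ten
        # nodes per 'branches' request to keep individual queries small.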
        if r:
            reqcnt += 1
            progress.increment()
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            for p in pycompat.xrange(0, len(r), 10):
                with remote.commandexecutor() as e:
                    branches = e.callcommand('branches', {
                        'nodes': r[p:p + 10],
                    }).result()

                for b in branches:
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
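    # For each (head, base) pair the 'between' command returns sample nodes
    # spaced at exponentially growing distances along the segment, so the
    # known/unknown boundary is narrowed in a logarithmic number of rounds.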
    while search:
        newsearch = []
        reqcnt += 1
        progress.increment()

        with remote.commandexecutor() as e:
            between = e.callcommand('between', {'pairs': search}).result()

        for n, l in zip(search, between):
            l.append(n[1])
            p = n[0]
            f = 1
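
            # Walk the samples from the unknown head towards the known
            # base; 'f' is the distance of the current sample from the
            # head (doubling each step, mirroring the spacing used by
            # 'between').  When a known node is hit, either the boundary
            # is pinned down (f <= 2) or the narrower (unknown, known)
            # pair is scheduled for another round.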
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if knownnode(i):
                    if f <= 2:
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base.add(i)
                    else:
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if knownnode(f):
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    base = list(base)
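
    # Having only the null revision in common means the two repositories
    # share no history at all; refuse to continue unless forced.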
    if base == [nullid]:
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise error.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    progress.complete()
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base, list(fetch), heads
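
# Minimal usage sketch (illustrative only): callers hold a local repository
# and a connected peer that still speaks the legacy 'branches'/'between'
# commands, e.g.
#
#   common, fetch, rheads = findcommonincoming(repo, remote)
#
# where 'common' bounds the shared history, 'fetch' lists the roots of the
# changesets that would be pulled and 'rheads' the remote's heads.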