# hg.py - repository classes for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
|
r3891 | from i18n import _ | ||
Ronny Pfannschmidt
|
r8109 | from lock import release | ||
Martin Geisler
|
r12271 | from node import hex, nullid, nullrev, short | ||
Matt Mackall
|
r3877 | import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo | ||
Sune Foldager
|
r10365 | import lock, util, extensions, error, encoding, node | ||
Martin Geisler
|
r12273 | import cmdutil, discovery, url, changegroup | ||
Benoit Boissinot
|
r10651 | import merge as mergemod | ||
import verify as verifymod | ||||
Simon Heimberg
|
r8312 | import errno, os, shutil | ||
mpm@selenic.com
|
r0 | |||
Vadim Gelfer
|
def _local(path):
    '''return the repository module for a local path: bundlerepo when
    the path names a file (a bundle), localrepo otherwise'''
    realpath = util.expandpath(util.drop_scheme('file', path))
    if os.path.isfile(realpath):
        return bundlerepo
    return localrepo
Vadim Gelfer
|
r2469 | |||
Sune Foldager
|
def addbranchrevs(lrepo, repo, branches, revs):
    '''expand the (hashbranch, branches) pair from parseurl() into
    revisions, returning (revs, checkout).

    lrepo is the local repository (used only to resolve '.' to the
    dirstate branch); repo is the repository the branch names live in.
    Returns the possibly extended revision list and the revision to
    check out (or None when there is nothing to expand).'''
    hashbranch, branches = branches
    if not hashbranch and not branches:
        # nothing to expand: pass revs through unchanged
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        # old servers cannot resolve branch names remotely; treat the
        # url fragment as a plain revision identifier instead
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(butf8):
        # append the heads of branch butf8 (UTF-8 encoded) to revs;
        # returns whether the branch exists in branchmap
        if butf8 == '.':
            # '.' means the current dirstate branch, which requires a
            # local repository to read
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            butf8 = lrepo.dirstate.branch()
        if butf8 in branchmap:
            # reversed: newest head first
            revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
            return True
        else:
            return False

    for branch in branches:
        butf8 = encoding.fromlocal(branch)
        if not primary(butf8):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # the url fragment may name a branch or a plain revision; only
        # fall back to treating it as a revision when no branch matches
        butf8 = encoding.fromlocal(hashbranch)
        if not primary(butf8):
            revs.append(hashbranch)
    return revs, revs[0]
def parseurl(url, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    base, sep, fragment = url.partition('#')
    if not sep:
        # no fragment: no branch requested in the url
        return url, (None, branches or [])
    return base, (fragment, branches or [])
Matt Mackall
|
r5177 | |||
Vadim Gelfer
|
# map each supported URL scheme to the module (or callable returning a
# module) that opens repositories for that scheme; consumed by _lookup()
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}
Vadim Gelfer
|
r2469 | |||
Vadim Gelfer
|
def _lookup(path):
    '''pick the repository handler for path based on its URL scheme,
    defaulting to the local-file handler'''
    scheme = 'file'
    if path:
        head, sep, _tail = path.partition(':')
        if sep and head:
            scheme = head
    handler = schemes.get(scheme) or schemes['file']
    # entries are either factory callables (called with the path) or
    # plain modules; calling a module raises TypeError, so return it as-is
    try:
        return handler(path)
    except TypeError:
        return handler
Matt Mackall
|
r2775 | |||
Vadim Gelfer
|
def islocal(repo):
    '''return true if repo (a repository object or a path string) is local'''
    if not isinstance(repo, str):
        return repo.local()
    # path string: ask the scheme handler; handlers without islocal()
    # (e.g. plain repo modules) are treated as non-local
    try:
        return _lookup(repo).islocal(repo)
    except AttributeError:
        return False
Brendan Cully
|
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    # prefer the repository's own ui when it has one
    ui = getattr(repo, "ui", ui)
    # let every loaded extension wrap or augment the new repository
    for extname, extmodule in extensions.extensions():
        reposetup = getattr(extmodule, 'reposetup', None)
        if reposetup:
            reposetup(ui, repo)
    return repo
Vadim Gelfer
|
r2597 | |||
Vadim Gelfer
|
def defaultdest(source):
    '''return default destination of clone if none is given: the last
    path component of the (normalized) source'''
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
Matt Mackall
|
r2774 | |||
Bryan O'Sullivan
|
def localpath(path):
    '''strip a file: URL prefix from path, returning a filesystem path'''
    # longest prefix first; 'file://localhost/' keeps one character of
    # the prefix (the slash) so the result stays an absolute path
    for prefix, keep in (('file://localhost/', 16),
                         ('file://', 7),
                         ('file:', 5)):
        if path.startswith(prefix):
            return path[keep:]
    return path
Matt Mackall
|
def share(ui, source, dest=None, update=True):
    '''create a shared repository: a new working copy whose .hg points
    at the source repository's store through a 'sharedpath' file'''

    if not islocal(source):
        raise util.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        # rev is unused here; only the checkout hint matters
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        srcrepo = source
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    root = os.path.realpath(dest)
    roothg = os.path.join(root, '.hg')

    if os.path.exists(roothg):
        raise util.Abort(_('destination already exists'))

    if not os.path.isdir(root):
        os.mkdir(root)
    os.mkdir(roothg)

    # copy the source's requirements and add 'shared'; a missing
    # requires file (ENOENT) just means an old-format repository
    requirements = ''
    try:
        requirements = srcrepo.opener('requires').read()
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise

    requirements += 'shared\n'
    file(os.path.join(roothg, 'requires'), 'w').write(requirements)
    file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)

    # propagate the source's default path so pull/push keep working
    default = srcrepo.ui.config('paths', 'default')
    if default:
        f = file(os.path.join(roothg, 'hgrc'), 'w')
        f.write('[paths]\ndefault = %s\n' % default)
        f.close()

    r = repository(ui, root)

    if update:
        r.ui.status(_("updating working directory\n"))
        if update is not True:
            # caller asked for a specific revision/branch
            checkout = update
        for test in (checkout, 'default', 'tip'):
            if test is None:
                continue
            try:
                uprev = r.lookup(test)
                break
            except error.RepoLookupError:
                continue
        # NOTE(review): if every lookup above fails, uprev is unbound
        # here -- presumably 'tip' always resolves; confirm
        _update(r, uprev)
Vadim Gelfer
|
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        # normalize branch to the (hashbranch, branches) shape that
        # parseurl() would have produced
        branch = (None, branch or [])
        origsource = source = src_repo.url()
    rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    # strip file: prefixes so both are plain paths/URLs
    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif os.listdir(dest):
            raise util.Abort(_("destination '%s' is not empty") % dest)

    # removes the partly created destination on failure; close() disarms it
    class DirCleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def cleanup(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        if src_repo.cancopy() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            # a raw store copy is only valid without pull and without a
            # revision restriction
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock(wait=False)
            except error.LockError:
                # source is busy: fall back to a regular clone
                copy = False

        if copy:
            src_repo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                dir_cleanup.dir_ = hgdir
            try:
                dest_path = hgdir
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            # None lets copyfiles decide whether hardlinks work, then
            # remembers the answer for subsequent files
            hardlink = None
            num = 0
            for f in src_repo.store.copylist():
                src = os.path.join(src_repo.sharedpath, f)
                dst = os.path.join(dest_path, f)
                dstbase = os.path.dirname(dst)
                if dstbase and not os.path.exists(dstbase):
                    os.mkdir(dstbase)
                if os.path.exists(src):
                    if dst.endswith('data'):
                        # lock to avoid premature writing to the target
                        dest_lock = lock.lock(os.path.join(dstbase, "lock"))
                    hardlink, n = util.copyfiles(src, dst, hardlink)
                    num += n
            if hardlink:
                ui.debug("linked %d files\n" % num)
            else:
                ui.debug("copied %d files\n" % num)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)
            src_repo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if 'lookup' not in src_repo.capabilities:
                    raise util.Abort(_("src repository does not support "
                                       "revision lookup and so doesn't "
                                       "support clone by revision"))
                revs = [src_repo.lookup(r) for r in rev]
                checkout = revs[0]
            if dest_repo.local():
                dest_repo.clone(src_repo, heads=revs, stream=stream)
            elif src_repo.local():
                src_repo.push(dest_repo, revs=revs)
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        # clone succeeded: keep the destination directory
        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            # record the source as the default path for future pull/push
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            dest_repo.ui.setconfig('paths', 'default', abspath)

            if update:
                if update is not True:
                    checkout = update
                    if src_repo.local():
                        checkout = src_repo.lookup(update)
                for test in (checkout, 'default', 'tip'):
                    if test is None:
                        continue
                    try:
                        uprev = dest_repo.lookup(test)
                        break
                    except error.RepoLookupError:
                        continue
                # NOTE(review): uprev is unbound if every lookup fails --
                # presumably 'tip' always resolves; confirm
                bn = dest_repo[uprev].branch()
                dest_repo.ui.status(_("updating to branch %s\n")
                                    % encoding.tolocal(bn))
                _update(dest_repo, uprev)

        return src_repo, dest_repo
    finally:
        release(src_lock, dest_lock)
        if dir_cleanup is not None:
            dir_cleanup.cleanup()
Matt Mackall
|
r2775 | |||
Matt Mackall
|
def _showstats(repo, stats):
    # stats is the 4-tuple returned by mergemod.update(); the format
    # string labels its fields: (updated, merged, removed, unresolved)
    repo.ui.status(_("%d files updated, %d files merged, "
                     "%d files removed, %d files unresolved\n") % stats)
Matt Mackall
|
r3316 | |||
Matt Mackall
|
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    # positional flags are passed through to mergemod.update; see that
    # function for their meaning
    result = mergemod.update(repo, node, False, False, None)
    _showstats(repo, result)
    unresolved = result[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return unresolved > 0
Matt Mackall
|
r2775 | |||
Benoit Boissinot
|
# clone() takes an 'update' argument that shadows the function above;
# keep a module-level alias so clone() can still call it
_update = update
Matt Mackall
|
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    result = mergemod.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, result)
    # result[3] is the unresolved-file count
    return result[3] > 0
Matt Mackall
|
r2775 | |||
Matt Mackall
|
def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes"""
    result = mergemod.update(repo, node, True, force, False)
    _showstats(repo, result)
    unresolved = result[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return unresolved > 0
Matt Mackall
|
r2808 | |||
Martin Geisler
|
def incoming(ui, repo, source, opts):
    '''show changesets in source not present in repo; returns 0 when
    incoming changes were found (here or in a subrepository), 1 otherwise'''

    def recurse():
        # with --subrepos, also check each subrepository; a 0 result
        # from any of them means something was found
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = repository(remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    # checkout is unused here; only revs matter
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    tmp = discovery.findcommonincoming(repo, other, heads=revs,
                                       force=opts.get('force'))
    common, incoming, rheads = tmp
    if not incoming:
        # best-effort removal of a stale bundle file; the bare except
        # is deliberate: opts["bundle"] may be empty or None, raising
        # OSError or TypeError respectively
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return recurse()

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None and other.capable('changegroupsubset'):
                revs = rheads
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
        displayer.close()
    finally:
        # close the (possibly bundle-backed) remote and drop any
        # temporary bundle file we created
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
    recurse()
    return 0 # exit code is zero since we found incoming changes
Martin Geisler
|
r12273 | |||
Martin Geisler
|
def outgoing(ui, repo, dest, opts):
    '''show changesets in repo not present in dest; returns 0 when
    outgoing changes were found (here or in a subrepository), 1 otherwise'''

    def recurse():
        # with --subrepos, also check each subrepository; a 0 result
        # from any of them means something was found
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = parseurl(dest, opts.get('branch'))
    # checkout is unused here; only revs matter
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = repository(remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return recurse()

    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if limit is not None and count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
    displayer.close()
    recurse()
    return 0 # exit code is zero since we found outgoing changes
Martin Geisler
|
r12271 | |||
Matt Mackall
|
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    result = mergemod.update(repo, node, False, True, choose)
    # result[3] is the unresolved-file count
    return result[3] > 0
Matt Mackall
|
r2778 | |||
def verify(repo):
    """verify the consistency of a repository"""
    # thin wrapper: delegates entirely to the verify module and returns
    # its result unchanged
    return verifymod.verify(repo)
Matt Mackall
|
r11273 | |||
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    # a repository carries a baseui without repo-local configuration;
    # a plain ui object is copied wholesale
    if hasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # ssh-related options: command-line opts win over configuration
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key) or src.config('ui', key)
        if value:
            dst.setconfig("ui", key, value)

    # carry the main repo root over for bundle handling
    root = src.config('bundle', 'mainreporoot')
    if root:
        dst.setconfig('bundle', 'mainreporoot', root)

    # authentication and proxy settings must survive onto the remote ui
    for section in ('auth', 'http_proxy'):
        for name, value in src.configitems(section):
            dst.setconfig(section, name, value)
    return dst