uncommit: move _movedirstate() to scmutil for reuse...
Martin von Zweigbergk
r42103:232d4b9d default
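This changeset deletes the extension-private `_movedirstate()` from `hgext/uncommit.py` so the helper can live in `mercurial/scmutil.py` and be reused; both call sites (`hg uncommit` and `hg unamend`) now go through `scmutil.movedirstate()`. A minimal sketch of the resulting call pattern, mirroring the two updated call sites in the diff below (the wrapper name `_finishrewrite` is hypothetical, not part of this change):

```python
from mercurial import scmutil

def _finishrewrite(repo, newid, match=None):
    # Hypothetical wrapper: after this change, extensions call the shared
    # scmutil.movedirstate() instead of keeping a private copy. As both
    # callers in the diff show, it must run inside a
    # dirstate.parentchange() context.
    with repo.dirstate.parentchange():
        scmutil.movedirstate(repo, repo[newid], match)
```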
--- a/hgext/uncommit.py
+++ b/hgext/uncommit.py
@@ -1,261 +1,223 @@
 # uncommit - undo the actions of a commit
 #
 # Copyright 2011 Peter Arrenbrecht <peter.arrenbrecht@gmail.com>
 #                Logilab SA        <contact@logilab.fr>
 #                Pierre-Yves David <pierre-yves.david@ens-lyon.org>
 #                Patrick Mezard <patrick@mezard.eu>
 # Copyright 2016 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 """uncommit part or all of a local changeset (EXPERIMENTAL)
 
 This command undoes the effect of a local commit, returning the affected
 files to their uncommitted state. This means that files modified, added or
 removed in the changeset will be left unchanged, and so will remain modified,
 added and removed in the working directory.
 """
 
 from __future__ import absolute_import
 
 from mercurial.i18n import _
 
 from mercurial import (
     cmdutil,
     commands,
     context,
     copies as copiesmod,
     error,
     node,
     obsutil,
     pycompat,
     registrar,
     rewriteutil,
     scmutil,
 )
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem('experimental', 'uncommitondirtywdir',
     default=False,
 )
 configitem('experimental', 'uncommit.keep',
     default=False,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
 def _commitfiltered(repo, ctx, match, keepcommit):
     """Recommit ctx with changed files not in match. Return the new
     node identifier, or None if nothing changed.
     """
     base = ctx.p1()
     # ctx
     initialfiles = set(ctx.files())
     exclude = set(f for f in initialfiles if match(f))
 
     # No files matched commit, so nothing excluded
     if not exclude:
         return None
 
     # return the p1 so that we don't create an obsmarker later
     if not keepcommit:
         return ctx.p1().node()
 
     files = (initialfiles - exclude)
     # Filter copies
     copied = copiesmod.pathcopies(base, ctx)
     copied = dict((dst, src) for dst, src in copied.iteritems()
                   if dst in files)
     def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
         if path not in contentctx:
             return None
         fctx = contentctx[path]
         mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
                                   fctx.islink(),
                                   fctx.isexec(),
                                   copied=copied.get(path))
         return mctx
 
     if not files:
         repo.ui.status(_("note: keeping empty commit\n"))
 
     new = context.memctx(repo,
                          parents=[base.node(), node.nullid],
                          text=ctx.description(),
                          files=files,
                          filectxfn=filectxfn,
                          user=ctx.user(),
                          date=ctx.date(),
                          extra=ctx.extra())
     return repo.commitctx(new)
 
-def _movedirstate(repo, newctx, match=None):
-    """Move the dirstate to newctx and adjust it as necessary."""
-    oldctx = repo['.']
-    ds = repo.dirstate
-    ds.setparents(newctx.node(), node.nullid)
-    copies = dict(ds.copies())
-    s = newctx.status(oldctx, match=match)
-    for f in s.modified:
-        if ds[f] == 'r':
-            # modified + removed -> removed
-            continue
-        ds.normallookup(f)
-
-    for f in s.added:
-        if ds[f] == 'r':
-            # added + removed -> unknown
-            ds.drop(f)
-        elif ds[f] != 'a':
-            ds.add(f)
-
-    for f in s.removed:
-        if ds[f] == 'a':
-            # removed + added -> normal
-            ds.normallookup(f)
-        elif ds[f] != 'r':
-            ds.remove(f)
-
-    # Merge old parent and old working dir copies
-    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
-    oldcopies.update(copies)
-    copies = dict((dst, oldcopies.get(src, src))
-                  for dst, src in oldcopies.iteritems())
-    # Adjust the dirstate copies
-    for dst, src in copies.iteritems():
-        if (src not in newctx or dst in newctx or ds[dst] != 'a'):
-            src = None
-        ds.copy(src, dst)
-
 @command('uncommit',
     [('', 'keep', None, _('allow an empty commit after uncommiting')),
      ('', 'allow-dirty-working-copy', False,
         _('allow uncommit with outstanding changes'))
     ] + commands.walkopts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
 def uncommit(ui, repo, *pats, **opts):
     """uncommit part or all of a local changeset
 
     This command undoes the effect of a local commit, returning the affected
     files to their uncommitted state. This means that files modified or
     deleted in the changeset will be left unchanged, and so will remain
     modified in the working directory.
 
     If no files are specified, the commit will be pruned, unless --keep is
     given.
     """
     opts = pycompat.byteskwargs(opts)
 
     with repo.wlock(), repo.lock():
 
         m, a, r, d = repo.status()[:4]
         isdirtypath = any(set(m + a + r + d) & set(pats))
         allowdirtywcopy = (opts['allow_dirty_working_copy'] or
                            repo.ui.configbool('experimental', 'uncommitondirtywdir'))
         if not allowdirtywcopy and (not pats or isdirtypath):
             cmdutil.bailifchanged(repo, hint=_('requires '
                                 '--allow-dirty-working-copy to uncommit'))
         old = repo['.']
         rewriteutil.precheck(repo, [old.rev()], 'uncommit')
         if len(old.parents()) > 1:
             raise error.Abort(_("cannot uncommit merge changeset"))
 
         with repo.transaction('uncommit'):
             match = scmutil.match(old, pats, opts)
             keepcommit = pats
             if not keepcommit:
                 if opts.get('keep') is not None:
                     keepcommit = opts.get('keep')
                 else:
                     keepcommit = ui.configbool('experimental', 'uncommit.keep')
             newid = _commitfiltered(repo, old, match, keepcommit)
             if newid is None:
                 ui.status(_("nothing to uncommit\n"))
                 return 1
 
             mapping = {}
             if newid != old.p1().node():
                 # Move local changes on filtered changeset
                 mapping[old.node()] = (newid,)
             else:
                 # Fully removed the old commit
                 mapping[old.node()] = ()
 
             with repo.dirstate.parentchange():
-                _movedirstate(repo, repo[newid], match)
+                scmutil.movedirstate(repo, repo[newid], match)
 
             scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
 
 def predecessormarkers(ctx):
     """yields the obsolete markers marking the given changeset as a successor"""
     for data in ctx.repo().obsstore.predecessors.get(ctx.node(), ()):
         yield obsutil.marker(ctx.repo(), data)
 
 @command('unamend', [], helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
          helpbasic=True)
 def unamend(ui, repo, **opts):
     """undo the most recent amend operation on a current changeset
 
     This command will roll back to the previous version of a changeset,
     leaving working directory in state in which it was before running
     `hg amend` (e.g. files modified as part of an amend will be
     marked as modified `hg status`)
     """
 
     unfi = repo.unfiltered()
     with repo.wlock(), repo.lock(), repo.transaction('unamend'):
 
         # identify the commit from which to unamend
         curctx = repo['.']
 
         rewriteutil.precheck(repo, [curctx.rev()], 'unamend')
 
         # identify the commit to which to unamend
         markers = list(predecessormarkers(curctx))
         if len(markers) != 1:
             e = _("changeset must have one predecessor, found %i predecessors")
             raise error.Abort(e % len(markers))
 
         prednode = markers[0].prednode()
         predctx = unfi[prednode]
 
         # add an extra so that we get a new hash
         # note: allowing unamend to undo an unamend is an intentional feature
         extras = predctx.extra()
         extras['unamend_source'] = curctx.hex()
 
         def filectxfn(repo, ctx_, path):
             try:
                 return predctx.filectx(path)
             except KeyError:
                 return None
 
         # Make a new commit same as predctx
         newctx = context.memctx(repo,
                                 parents=(predctx.p1(), predctx.p2()),
                                 text=predctx.description(),
                                 files=predctx.files(),
                                 filectxfn=filectxfn,
                                 user=predctx.user(),
                                 date=predctx.date(),
                                 extra=extras)
         newprednode = repo.commitctx(newctx)
         newpredctx = repo[newprednode]
         dirstate = repo.dirstate
 
         with dirstate.parentchange():
-            _movedirstate(repo, newpredctx)
+            scmutil.movedirstate(repo, newpredctx)
 
         mapping = {curctx.node(): (newprednode,)}
         scmutil.cleanupnodes(repo, mapping, 'unamend', fixphase=True)
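The `predecessormarkers()` helper above drives `hg unamend`'s lookup of the pre-amend changeset. A hedged usage sketch (the repository path and revision are illustrative, and obsolescence markers must be enabled for it to yield anything):

```python
from mercurial import hg, ui as uimod
from hgext.uncommit import predecessormarkers

repo = hg.repository(uimod.ui.load(), b'.')  # path is illustrative
ctx = repo[b'.']

# Each marker records a rewrite that produced ctx; unamend insists on
# exactly one predecessor before it will roll back.
for marker in predecessormarkers(ctx):
    print(marker.prednode())
```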
@@ -1,1903 +1,1942
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 copies as copiesmod,
31 encoding,
32 encoding,
32 error,
33 error,
33 match as matchmod,
34 match as matchmod,
34 obsolete,
35 obsolete,
35 obsutil,
36 obsutil,
36 pathutil,
37 pathutil,
37 phases,
38 phases,
38 policy,
39 policy,
39 pycompat,
40 pycompat,
40 revsetlang,
41 revsetlang,
41 similar,
42 similar,
42 smartset,
43 smartset,
43 url,
44 url,
44 util,
45 util,
45 vfs,
46 vfs,
46 )
47 )
47
48
48 from .utils import (
49 from .utils import (
49 procutil,
50 procutil,
50 stringutil,
51 stringutil,
51 )
52 )
52
53
53 if pycompat.iswindows:
54 if pycompat.iswindows:
54 from . import scmwindows as scmplatform
55 from . import scmwindows as scmplatform
55 else:
56 else:
56 from . import scmposix as scmplatform
57 from . import scmposix as scmplatform
57
58
58 parsers = policy.importmod(r'parsers')
59 parsers = policy.importmod(r'parsers')
59
60
60 termsize = scmplatform.termsize
61 termsize = scmplatform.termsize
61
62
62 class status(tuple):
63 class status(tuple):
63 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
64 and 'ignored' properties are only relevant to the working copy.
65 and 'ignored' properties are only relevant to the working copy.
65 '''
66 '''
66
67
67 __slots__ = ()
68 __slots__ = ()
68
69
69 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
70 clean):
71 clean):
71 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
72 ignored, clean))
73 ignored, clean))
73
74
74 @property
75 @property
75 def modified(self):
76 def modified(self):
76 '''files that have been modified'''
77 '''files that have been modified'''
77 return self[0]
78 return self[0]
78
79
79 @property
80 @property
80 def added(self):
81 def added(self):
81 '''files that have been added'''
82 '''files that have been added'''
82 return self[1]
83 return self[1]
83
84
84 @property
85 @property
85 def removed(self):
86 def removed(self):
86 '''files that have been removed'''
87 '''files that have been removed'''
87 return self[2]
88 return self[2]
88
89
89 @property
90 @property
90 def deleted(self):
91 def deleted(self):
91 '''files that are in the dirstate, but have been deleted from the
92 '''files that are in the dirstate, but have been deleted from the
92 working copy (aka "missing")
93 working copy (aka "missing")
93 '''
94 '''
94 return self[3]
95 return self[3]
95
96
96 @property
97 @property
97 def unknown(self):
98 def unknown(self):
98 '''files not in the dirstate that are not ignored'''
99 '''files not in the dirstate that are not ignored'''
99 return self[4]
100 return self[4]
100
101
101 @property
102 @property
102 def ignored(self):
103 def ignored(self):
103 '''files not in the dirstate that are ignored (by _dirignore())'''
104 '''files not in the dirstate that are ignored (by _dirignore())'''
104 return self[5]
105 return self[5]
105
106
106 @property
107 @property
107 def clean(self):
108 def clean(self):
108 '''files that have not been modified'''
109 '''files that have not been modified'''
109 return self[6]
110 return self[6]
110
111
111 def __repr__(self, *args, **kwargs):
112 def __repr__(self, *args, **kwargs):
112 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
113 r'unknown=%s, ignored=%s, clean=%s>') %
114 r'unknown=%s, ignored=%s, clean=%s>') %
114 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115
116
116 def itersubrepos(ctx1, ctx2):
117 def itersubrepos(ctx1, ctx2):
117 """find subrepos in ctx1 or ctx2"""
118 """find subrepos in ctx1 or ctx2"""
118 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 # Create a (subpath, ctx) mapping where we prefer subpaths from
119 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 # ctx1. The subpaths from ctx2 are important when the .hgsub file
120 # has been modified (in ctx2) but not yet committed (in ctx1).
121 # has been modified (in ctx2) but not yet committed (in ctx1).
121 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 subpaths = dict.fromkeys(ctx2.substate, ctx2)
122 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
123
124
124 missing = set()
125 missing = set()
125
126
126 for subpath in ctx2.substate:
127 for subpath in ctx2.substate:
127 if subpath not in ctx1.substate:
128 if subpath not in ctx1.substate:
128 del subpaths[subpath]
129 del subpaths[subpath]
129 missing.add(subpath)
130 missing.add(subpath)
130
131
131 for subpath, ctx in sorted(subpaths.iteritems()):
132 for subpath, ctx in sorted(subpaths.iteritems()):
132 yield subpath, ctx.sub(subpath)
133 yield subpath, ctx.sub(subpath)
133
134
134 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
135 # status and diff will have an accurate result when it does
136 # status and diff will have an accurate result when it does
136 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
137 # against itself.
138 # against itself.
138 for subpath in missing:
139 for subpath in missing:
139 yield subpath, ctx2.nullsub(subpath, ctx1)
140 yield subpath, ctx2.nullsub(subpath, ctx1)
140
141
141 def nochangesfound(ui, repo, excluded=None):
142 def nochangesfound(ui, repo, excluded=None):
142 '''Report no changes for push/pull, excluded is None or a list of
143 '''Report no changes for push/pull, excluded is None or a list of
143 nodes excluded from the push/pull.
144 nodes excluded from the push/pull.
144 '''
145 '''
145 secretlist = []
146 secretlist = []
146 if excluded:
147 if excluded:
147 for n in excluded:
148 for n in excluded:
148 ctx = repo[n]
149 ctx = repo[n]
149 if ctx.phase() >= phases.secret and not ctx.extinct():
150 if ctx.phase() >= phases.secret and not ctx.extinct():
150 secretlist.append(n)
151 secretlist.append(n)
151
152
152 if secretlist:
153 if secretlist:
153 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 ui.status(_("no changes found (ignored %d secret changesets)\n")
154 % len(secretlist))
155 % len(secretlist))
155 else:
156 else:
156 ui.status(_("no changes found\n"))
157 ui.status(_("no changes found\n"))
157
158
158 def callcatch(ui, func):
159 def callcatch(ui, func):
159 """call func() with global exception handling
160 """call func() with global exception handling
160
161
161 return func() if no exception happens. otherwise do some error handling
162 return func() if no exception happens. otherwise do some error handling
162 and return an exit code accordingly. does not handle all exceptions.
163 and return an exit code accordingly. does not handle all exceptions.
163 """
164 """
164 try:
165 try:
165 try:
166 try:
166 return func()
167 return func()
167 except: # re-raises
168 except: # re-raises
168 ui.traceback()
169 ui.traceback()
169 raise
170 raise
170 # Global exception handling, alphabetically
171 # Global exception handling, alphabetically
171 # Mercurial-specific first, followed by built-in and library exceptions
172 # Mercurial-specific first, followed by built-in and library exceptions
172 except error.LockHeld as inst:
173 except error.LockHeld as inst:
173 if inst.errno == errno.ETIMEDOUT:
174 if inst.errno == errno.ETIMEDOUT:
174 reason = _('timed out waiting for lock held by %r') % (
175 reason = _('timed out waiting for lock held by %r') % (
175 pycompat.bytestr(inst.locker))
176 pycompat.bytestr(inst.locker))
176 else:
177 else:
177 reason = _('lock held by %r') % inst.locker
178 reason = _('lock held by %r') % inst.locker
178 ui.error(_("abort: %s: %s\n") % (
179 ui.error(_("abort: %s: %s\n") % (
179 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 inst.desc or stringutil.forcebytestr(inst.filename), reason))
180 if not inst.locker:
181 if not inst.locker:
181 ui.error(_("(lock might be very busy)\n"))
182 ui.error(_("(lock might be very busy)\n"))
182 except error.LockUnavailable as inst:
183 except error.LockUnavailable as inst:
183 ui.error(_("abort: could not lock %s: %s\n") %
184 ui.error(_("abort: could not lock %s: %s\n") %
184 (inst.desc or stringutil.forcebytestr(inst.filename),
185 (inst.desc or stringutil.forcebytestr(inst.filename),
185 encoding.strtolocal(inst.strerror)))
186 encoding.strtolocal(inst.strerror)))
186 except error.OutOfBandError as inst:
187 except error.OutOfBandError as inst:
187 if inst.args:
188 if inst.args:
188 msg = _("abort: remote error:\n")
189 msg = _("abort: remote error:\n")
189 else:
190 else:
190 msg = _("abort: remote error\n")
191 msg = _("abort: remote error\n")
191 ui.error(msg)
192 ui.error(msg)
192 if inst.args:
193 if inst.args:
193 ui.error(''.join(inst.args))
194 ui.error(''.join(inst.args))
194 if inst.hint:
195 if inst.hint:
195 ui.error('(%s)\n' % inst.hint)
196 ui.error('(%s)\n' % inst.hint)
196 except error.RepoError as inst:
197 except error.RepoError as inst:
197 ui.error(_("abort: %s!\n") % inst)
198 ui.error(_("abort: %s!\n") % inst)
198 if inst.hint:
199 if inst.hint:
199 ui.error(_("(%s)\n") % inst.hint)
200 ui.error(_("(%s)\n") % inst.hint)
200 except error.ResponseError as inst:
201 except error.ResponseError as inst:
201 ui.error(_("abort: %s") % inst.args[0])
202 ui.error(_("abort: %s") % inst.args[0])
202 msg = inst.args[1]
203 msg = inst.args[1]
203 if isinstance(msg, type(u'')):
204 if isinstance(msg, type(u'')):
204 msg = pycompat.sysbytes(msg)
205 msg = pycompat.sysbytes(msg)
205 if not isinstance(msg, bytes):
206 if not isinstance(msg, bytes):
206 ui.error(" %r\n" % (msg,))
207 ui.error(" %r\n" % (msg,))
207 elif not msg:
208 elif not msg:
208 ui.error(_(" empty string\n"))
209 ui.error(_(" empty string\n"))
209 else:
210 else:
210 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
211 except error.CensoredNodeError as inst:
212 except error.CensoredNodeError as inst:
212 ui.error(_("abort: file censored %s!\n") % inst)
213 ui.error(_("abort: file censored %s!\n") % inst)
213 except error.StorageError as inst:
214 except error.StorageError as inst:
214 ui.error(_("abort: %s!\n") % inst)
215 ui.error(_("abort: %s!\n") % inst)
215 if inst.hint:
216 if inst.hint:
216 ui.error(_("(%s)\n") % inst.hint)
217 ui.error(_("(%s)\n") % inst.hint)
217 except error.InterventionRequired as inst:
218 except error.InterventionRequired as inst:
218 ui.error("%s\n" % inst)
219 ui.error("%s\n" % inst)
219 if inst.hint:
220 if inst.hint:
220 ui.error(_("(%s)\n") % inst.hint)
221 ui.error(_("(%s)\n") % inst.hint)
221 return 1
222 return 1
222 except error.WdirUnsupported:
223 except error.WdirUnsupported:
223 ui.error(_("abort: working directory revision cannot be specified\n"))
224 ui.error(_("abort: working directory revision cannot be specified\n"))
224 except error.Abort as inst:
225 except error.Abort as inst:
225 ui.error(_("abort: %s\n") % inst)
226 ui.error(_("abort: %s\n") % inst)
226 if inst.hint:
227 if inst.hint:
227 ui.error(_("(%s)\n") % inst.hint)
228 ui.error(_("(%s)\n") % inst.hint)
228 except ImportError as inst:
229 except ImportError as inst:
229 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
230 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
230 m = stringutil.forcebytestr(inst).split()[-1]
231 m = stringutil.forcebytestr(inst).split()[-1]
231 if m in "mpatch bdiff".split():
232 if m in "mpatch bdiff".split():
232 ui.error(_("(did you forget to compile extensions?)\n"))
233 ui.error(_("(did you forget to compile extensions?)\n"))
233 elif m in "zlib".split():
234 elif m in "zlib".split():
234 ui.error(_("(is your Python install correct?)\n"))
235 ui.error(_("(is your Python install correct?)\n"))
235 except (IOError, OSError) as inst:
236 except (IOError, OSError) as inst:
236 if util.safehasattr(inst, "code"): # HTTPError
237 if util.safehasattr(inst, "code"): # HTTPError
237 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
238 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
238 elif util.safehasattr(inst, "reason"): # URLError or SSLError
239 elif util.safehasattr(inst, "reason"): # URLError or SSLError
239 try: # usually it is in the form (errno, strerror)
240 try: # usually it is in the form (errno, strerror)
240 reason = inst.reason.args[1]
241 reason = inst.reason.args[1]
241 except (AttributeError, IndexError):
242 except (AttributeError, IndexError):
242 # it might be anything, for example a string
243 # it might be anything, for example a string
243 reason = inst.reason
244 reason = inst.reason
244 if isinstance(reason, pycompat.unicode):
245 if isinstance(reason, pycompat.unicode):
245 # SSLError of Python 2.7.9 contains a unicode
246 # SSLError of Python 2.7.9 contains a unicode
246 reason = encoding.unitolocal(reason)
247 reason = encoding.unitolocal(reason)
247 ui.error(_("abort: error: %s\n") % reason)
248 ui.error(_("abort: error: %s\n") % reason)
248 elif (util.safehasattr(inst, "args")
249 elif (util.safehasattr(inst, "args")
249 and inst.args and inst.args[0] == errno.EPIPE):
250 and inst.args and inst.args[0] == errno.EPIPE):
250 pass
251 pass
251 elif getattr(inst, "strerror", None): # common IOError or OSError
252 elif getattr(inst, "strerror", None): # common IOError or OSError
252 if getattr(inst, "filename", None) is not None:
253 if getattr(inst, "filename", None) is not None:
253 ui.error(_("abort: %s: '%s'\n") % (
254 ui.error(_("abort: %s: '%s'\n") % (
254 encoding.strtolocal(inst.strerror),
255 encoding.strtolocal(inst.strerror),
255 stringutil.forcebytestr(inst.filename)))
256 stringutil.forcebytestr(inst.filename)))
256 else:
257 else:
257 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
258 else: # suspicious IOError
259 else: # suspicious IOError
259 raise
260 raise
260 except MemoryError:
261 except MemoryError:
261 ui.error(_("abort: out of memory\n"))
262 ui.error(_("abort: out of memory\n"))
262 except SystemExit as inst:
263 except SystemExit as inst:
263 # Commands shouldn't sys.exit directly, but give a return code.
264 # Commands shouldn't sys.exit directly, but give a return code.
264 # Just in case catch this and and pass exit code to caller.
265 # Just in case catch this and and pass exit code to caller.
265 return inst.code
266 return inst.code
266
267
267 return -1
268 return -1
268
269
269 def checknewlabel(repo, lbl, kind):
270 def checknewlabel(repo, lbl, kind):
270 # Do not use the "kind" parameter in ui output.
271 # Do not use the "kind" parameter in ui output.
271 # It makes strings difficult to translate.
272 # It makes strings difficult to translate.
272 if lbl in ['tip', '.', 'null']:
273 if lbl in ['tip', '.', 'null']:
273 raise error.Abort(_("the name '%s' is reserved") % lbl)
274 raise error.Abort(_("the name '%s' is reserved") % lbl)
274 for c in (':', '\0', '\n', '\r'):
275 for c in (':', '\0', '\n', '\r'):
275 if c in lbl:
276 if c in lbl:
276 raise error.Abort(
277 raise error.Abort(
277 _("%r cannot be used in a name") % pycompat.bytestr(c))
278 _("%r cannot be used in a name") % pycompat.bytestr(c))
278 try:
279 try:
279 int(lbl)
280 int(lbl)
280 raise error.Abort(_("cannot use an integer as a name"))
281 raise error.Abort(_("cannot use an integer as a name"))
281 except ValueError:
282 except ValueError:
282 pass
283 pass
283 if lbl.strip() != lbl:
284 if lbl.strip() != lbl:
284 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285
286
286 def checkfilename(f):
287 def checkfilename(f):
287 '''Check that the filename f is an acceptable filename for a tracked file'''
288 '''Check that the filename f is an acceptable filename for a tracked file'''
288 if '\r' in f or '\n' in f:
289 if '\r' in f or '\n' in f:
289 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
290 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
290 % pycompat.bytestr(f))
291 % pycompat.bytestr(f))
291
292
292 def checkportable(ui, f):
293 def checkportable(ui, f):
293 '''Check if filename f is portable and warn or abort depending on config'''
294 '''Check if filename f is portable and warn or abort depending on config'''
294 checkfilename(f)
295 checkfilename(f)
295 abort, warn = checkportabilityalert(ui)
296 abort, warn = checkportabilityalert(ui)
296 if abort or warn:
297 if abort or warn:
297 msg = util.checkwinfilename(f)
298 msg = util.checkwinfilename(f)
298 if msg:
299 if msg:
299 msg = "%s: %s" % (msg, procutil.shellquote(f))
300 msg = "%s: %s" % (msg, procutil.shellquote(f))
300 if abort:
301 if abort:
301 raise error.Abort(msg)
302 raise error.Abort(msg)
302 ui.warn(_("warning: %s\n") % msg)
303 ui.warn(_("warning: %s\n") % msg)
303
304
304 def checkportabilityalert(ui):
305 def checkportabilityalert(ui):
305 '''check if the user's config requests nothing, a warning, or abort for
306 '''check if the user's config requests nothing, a warning, or abort for
306 non-portable filenames'''
307 non-portable filenames'''
307 val = ui.config('ui', 'portablefilenames')
308 val = ui.config('ui', 'portablefilenames')
308 lval = val.lower()
309 lval = val.lower()
309 bval = stringutil.parsebool(val)
310 bval = stringutil.parsebool(val)
310 abort = pycompat.iswindows or lval == 'abort'
311 abort = pycompat.iswindows or lval == 'abort'
311 warn = bval or lval == 'warn'
312 warn = bval or lval == 'warn'
312 if bval is None and not (warn or abort or lval == 'ignore'):
313 if bval is None and not (warn or abort or lval == 'ignore'):
313 raise error.ConfigError(
314 raise error.ConfigError(
314 _("ui.portablefilenames value is invalid ('%s')") % val)
315 _("ui.portablefilenames value is invalid ('%s')") % val)
315 return abort, warn
316 return abort, warn
316
317
317 class casecollisionauditor(object):
318 class casecollisionauditor(object):
318 def __init__(self, ui, abort, dirstate):
319 def __init__(self, ui, abort, dirstate):
319 self._ui = ui
320 self._ui = ui
320 self._abort = abort
321 self._abort = abort
321 allfiles = '\0'.join(dirstate._map)
322 allfiles = '\0'.join(dirstate._map)
322 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
323 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
323 self._dirstate = dirstate
324 self._dirstate = dirstate
324 # The purpose of _newfiles is so that we don't complain about
325 # The purpose of _newfiles is so that we don't complain about
325 # case collisions if someone were to call this object with the
326 # case collisions if someone were to call this object with the
326 # same filename twice.
327 # same filename twice.
327 self._newfiles = set()
328 self._newfiles = set()
328
329
329 def __call__(self, f):
330 def __call__(self, f):
330 if f in self._newfiles:
331 if f in self._newfiles:
331 return
332 return
332 fl = encoding.lower(f)
333 fl = encoding.lower(f)
333 if fl in self._loweredfiles and f not in self._dirstate:
334 if fl in self._loweredfiles and f not in self._dirstate:
334 msg = _('possible case-folding collision for %s') % f
335 msg = _('possible case-folding collision for %s') % f
335 if self._abort:
336 if self._abort:
336 raise error.Abort(msg)
337 raise error.Abort(msg)
337 self._ui.warn(_("warning: %s\n") % msg)
338 self._ui.warn(_("warning: %s\n") % msg)
338 self._loweredfiles.add(fl)
339 self._loweredfiles.add(fl)
339 self._newfiles.add(f)
340 self._newfiles.add(f)
340
341
341 def filteredhash(repo, maxrev):
342 def filteredhash(repo, maxrev):
342 """build hash of filtered revisions in the current repoview.
343 """build hash of filtered revisions in the current repoview.
343
344
344 Multiple caches perform up-to-date validation by checking that the
345 Multiple caches perform up-to-date validation by checking that the
345 tiprev and tipnode stored in the cache file match the current repository.
346 tiprev and tipnode stored in the cache file match the current repository.
346 However, this is not sufficient for validating repoviews because the set
347 However, this is not sufficient for validating repoviews because the set
347 of revisions in the view may change without the repository tiprev and
348 of revisions in the view may change without the repository tiprev and
348 tipnode changing.
349 tipnode changing.
349
350
350 This function hashes all the revs filtered from the view and returns
351 This function hashes all the revs filtered from the view and returns
351 that SHA-1 digest.
352 that SHA-1 digest.
352 """
353 """
353 cl = repo.changelog
354 cl = repo.changelog
354 if not cl.filteredrevs:
355 if not cl.filteredrevs:
355 return None
356 return None
356 key = None
357 key = None
357 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
358 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
358 if revs:
359 if revs:
359 s = hashlib.sha1()
360 s = hashlib.sha1()
360 for rev in revs:
361 for rev in revs:
361 s.update('%d;' % rev)
362 s.update('%d;' % rev)
362 key = s.digest()
363 key = s.digest()
363 return key
364 return key
364
365
365 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
366 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
366 '''yield every hg repository under path, always recursively.
367 '''yield every hg repository under path, always recursively.
367 The recurse flag will only control recursion into repo working dirs'''
368 The recurse flag will only control recursion into repo working dirs'''
368 def errhandler(err):
369 def errhandler(err):
369 if err.filename == path:
370 if err.filename == path:
370 raise err
371 raise err
371 samestat = getattr(os.path, 'samestat', None)
372 samestat = getattr(os.path, 'samestat', None)
372 if followsym and samestat is not None:
373 if followsym and samestat is not None:
373 def adddir(dirlst, dirname):
374 def adddir(dirlst, dirname):
374 dirstat = os.stat(dirname)
375 dirstat = os.stat(dirname)
375 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
376 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
376 if not match:
377 if not match:
377 dirlst.append(dirstat)
378 dirlst.append(dirstat)
378 return not match
379 return not match
379 else:
380 else:
380 followsym = False
381 followsym = False
381
382
382 if (seen_dirs is None) and followsym:
383 if (seen_dirs is None) and followsym:
383 seen_dirs = []
384 seen_dirs = []
384 adddir(seen_dirs, path)
385 adddir(seen_dirs, path)
385 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 dirs.sort()
387 dirs.sort()
387 if '.hg' in dirs:
388 if '.hg' in dirs:
388 yield root # found a repository
389 yield root # found a repository
389 qroot = os.path.join(root, '.hg', 'patches')
390 qroot = os.path.join(root, '.hg', 'patches')
390 if os.path.isdir(os.path.join(qroot, '.hg')):
391 if os.path.isdir(os.path.join(qroot, '.hg')):
391 yield qroot # we have a patch queue repo here
392 yield qroot # we have a patch queue repo here
392 if recurse:
393 if recurse:
393 # avoid recursing inside the .hg directory
394 # avoid recursing inside the .hg directory
394 dirs.remove('.hg')
395 dirs.remove('.hg')
395 else:
396 else:
396 dirs[:] = [] # don't descend further
397 dirs[:] = [] # don't descend further
397 elif followsym:
398 elif followsym:
398 newdirs = []
399 newdirs = []
399 for d in dirs:
400 for d in dirs:
400 fname = os.path.join(root, d)
401 fname = os.path.join(root, d)
401 if adddir(seen_dirs, fname):
402 if adddir(seen_dirs, fname):
402 if os.path.islink(fname):
403 if os.path.islink(fname):
403 for hgname in walkrepos(fname, True, seen_dirs):
404 for hgname in walkrepos(fname, True, seen_dirs):
404 yield hgname
405 yield hgname
405 else:
406 else:
406 newdirs.append(d)
407 newdirs.append(d)
407 dirs[:] = newdirs
408 dirs[:] = newdirs
408
409
409 def binnode(ctx):
410 def binnode(ctx):
410 """Return binary node id for a given basectx"""
411 """Return binary node id for a given basectx"""
411 node = ctx.node()
412 node = ctx.node()
412 if node is None:
413 if node is None:
413 return wdirid
414 return wdirid
414 return node
415 return node
415
416
416 def intrev(ctx):
417 def intrev(ctx):
417 """Return integer for a given basectx that can be used in comparison or
418 """Return integer for a given basectx that can be used in comparison or
418 arithmetic operation"""
419 arithmetic operation"""
419 rev = ctx.rev()
420 rev = ctx.rev()
420 if rev is None:
421 if rev is None:
421 return wdirrev
422 return wdirrev
422 return rev
423 return rev
423
424
424 def formatchangeid(ctx):
425 def formatchangeid(ctx):
425 """Format changectx as '{rev}:{node|formatnode}', which is the default
426 """Format changectx as '{rev}:{node|formatnode}', which is the default
426 template provided by logcmdutil.changesettemplater"""
427 template provided by logcmdutil.changesettemplater"""
427 repo = ctx.repo()
428 repo = ctx.repo()
428 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
429 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
429
430
430 def formatrevnode(ui, rev, node):
431 def formatrevnode(ui, rev, node):
431 """Format given revision and node depending on the current verbosity"""
432 """Format given revision and node depending on the current verbosity"""
432 if ui.debugflag:
433 if ui.debugflag:
433 hexfunc = hex
434 hexfunc = hex
434 else:
435 else:
435 hexfunc = short
436 hexfunc = short
436 return '%d:%s' % (rev, hexfunc(node))
437 return '%d:%s' % (rev, hexfunc(node))
437
438
438 def resolvehexnodeidprefix(repo, prefix):
439 def resolvehexnodeidprefix(repo, prefix):
439 if (prefix.startswith('x') and
440 if (prefix.startswith('x') and
440 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
441 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
441 prefix = prefix[1:]
442 prefix = prefix[1:]
442 try:
443 try:
443 # Uses unfiltered repo because it's faster when prefix is ambiguous/
444 # Uses unfiltered repo because it's faster when prefix is ambiguous/
444 # This matches the shortesthexnodeidprefix() function below.
445 # This matches the shortesthexnodeidprefix() function below.
445 node = repo.unfiltered().changelog._partialmatch(prefix)
446 node = repo.unfiltered().changelog._partialmatch(prefix)
446 except error.AmbiguousPrefixLookupError:
447 except error.AmbiguousPrefixLookupError:
447 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
448 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
448 if revset:
449 if revset:
449 # Clear config to avoid infinite recursion
450 # Clear config to avoid infinite recursion
450 configoverrides = {('experimental',
451 configoverrides = {('experimental',
451 'revisions.disambiguatewithin'): None}
452 'revisions.disambiguatewithin'): None}
452 with repo.ui.configoverride(configoverrides):
453 with repo.ui.configoverride(configoverrides):
453 revs = repo.anyrevs([revset], user=True)
454 revs = repo.anyrevs([revset], user=True)
454 matches = []
455 matches = []
455 for rev in revs:
456 for rev in revs:
456 node = repo.changelog.node(rev)
457 node = repo.changelog.node(rev)
457 if hex(node).startswith(prefix):
458 if hex(node).startswith(prefix):
458 matches.append(node)
459 matches.append(node)
459 if len(matches) == 1:
460 if len(matches) == 1:
460 return matches[0]
461 return matches[0]
461 raise
462 raise
462 if node is None:
463 if node is None:
463 return
464 return
464 repo.changelog.rev(node) # make sure node isn't filtered
465 repo.changelog.rev(node) # make sure node isn't filtered
465 return node
466 return node
466
467
467 def mayberevnum(repo, prefix):
468 def mayberevnum(repo, prefix):
468 """Checks if the given prefix may be mistaken for a revision number"""
469 """Checks if the given prefix may be mistaken for a revision number"""
469 try:
470 try:
470 i = int(prefix)
471 i = int(prefix)
471 # if we are a pure int, then starting with zero will not be
472 # if we are a pure int, then starting with zero will not be
472 # confused as a rev; or, obviously, if the int is larger
473 # confused as a rev; or, obviously, if the int is larger
473 # than the value of the tip rev. We still need to disambiguate if
474 # than the value of the tip rev. We still need to disambiguate if
474 # prefix == '0', since that *is* a valid revnum.
475 # prefix == '0', since that *is* a valid revnum.
475 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
476 if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
476 return False
477 return False
477 return True
478 return True
478 except ValueError:
479 except ValueError:
479 return False
480 return False
480
481
481 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
482 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
482 """Find the shortest unambiguous prefix that matches hexnode.
483 """Find the shortest unambiguous prefix that matches hexnode.
483
484
484 If "cache" is not None, it must be a dictionary that can be used for
485 If "cache" is not None, it must be a dictionary that can be used for
485 caching between calls to this method.
486 caching between calls to this method.
486 """
487 """
487 # _partialmatch() of filtered changelog could take O(len(repo)) time,
488 # _partialmatch() of filtered changelog could take O(len(repo)) time,
488 # which would be unacceptably slow. so we look for hash collision in
489 # which would be unacceptably slow. so we look for hash collision in
489 # unfiltered space, which means some hashes may be slightly longer.
490 # unfiltered space, which means some hashes may be slightly longer.
490
491
491 minlength=max(minlength, 1)
492 minlength=max(minlength, 1)
492
493
493 def disambiguate(prefix):
494 def disambiguate(prefix):
494 """Disambiguate against revnums."""
495 """Disambiguate against revnums."""
495 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
496 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
496 if mayberevnum(repo, prefix):
497 if mayberevnum(repo, prefix):
497 return 'x' + prefix
498 return 'x' + prefix
498 else:
499 else:
499 return prefix
500 return prefix
500
501
501 hexnode = hex(node)
502 hexnode = hex(node)
502 for length in range(len(prefix), len(hexnode) + 1):
503 for length in range(len(prefix), len(hexnode) + 1):
503 prefix = hexnode[:length]
504 prefix = hexnode[:length]
504 if not mayberevnum(repo, prefix):
505 if not mayberevnum(repo, prefix):
505 return prefix
506 return prefix
506
507
507 cl = repo.unfiltered().changelog
508 cl = repo.unfiltered().changelog
508 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
509 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
509 if revset:
510 if revset:
510 revs = None
511 revs = None
511 if cache is not None:
512 if cache is not None:
512 revs = cache.get('disambiguationrevset')
513 revs = cache.get('disambiguationrevset')
513 if revs is None:
514 if revs is None:
514 revs = repo.anyrevs([revset], user=True)
515 revs = repo.anyrevs([revset], user=True)
515 if cache is not None:
516 if cache is not None:
516 cache['disambiguationrevset'] = revs
517 cache['disambiguationrevset'] = revs
517 if cl.rev(node) in revs:
518 if cl.rev(node) in revs:
518 hexnode = hex(node)
519 hexnode = hex(node)
519 nodetree = None
520 nodetree = None
520 if cache is not None:
521 if cache is not None:
521 nodetree = cache.get('disambiguationnodetree')
522 nodetree = cache.get('disambiguationnodetree')
522 if not nodetree:
523 if not nodetree:
523 try:
524 try:
524 nodetree = parsers.nodetree(cl.index, len(revs))
525 nodetree = parsers.nodetree(cl.index, len(revs))
525 except AttributeError:
526 except AttributeError:
526 # no native nodetree
527 # no native nodetree
527 pass
528 pass
528 else:
529 else:
529 for r in revs:
530 for r in revs:
530 nodetree.insert(r)
531 nodetree.insert(r)
531 if cache is not None:
532 if cache is not None:
532 cache['disambiguationnodetree'] = nodetree
533 cache['disambiguationnodetree'] = nodetree
533 if nodetree is not None:
534 if nodetree is not None:
534 length = max(nodetree.shortest(node), minlength)
535 length = max(nodetree.shortest(node), minlength)
535 prefix = hexnode[:length]
536 prefix = hexnode[:length]
536 return disambiguate(prefix)
537 return disambiguate(prefix)
537 for length in range(minlength, len(hexnode) + 1):
538 for length in range(minlength, len(hexnode) + 1):
538 matches = []
539 matches = []
539 prefix = hexnode[:length]
540 prefix = hexnode[:length]
540 for rev in revs:
541 for rev in revs:
541 otherhexnode = repo[rev].hex()
542 otherhexnode = repo[rev].hex()
542 if prefix == otherhexnode[:length]:
543 if prefix == otherhexnode[:length]:
543 matches.append(otherhexnode)
544 matches.append(otherhexnode)
544 if len(matches) == 1:
545 if len(matches) == 1:
545 return disambiguate(prefix)
546 return disambiguate(prefix)
546
547
547 try:
548 try:
548 return disambiguate(cl.shortest(node, minlength))
549 return disambiguate(cl.shortest(node, minlength))
549 except error.LookupError:
550 except error.LookupError:
550 raise error.RepoLookupError()
551 raise error.RepoLookupError()
551
552
552 def isrevsymbol(repo, symbol):
553 def isrevsymbol(repo, symbol):
553 """Checks if a symbol exists in the repo.
554 """Checks if a symbol exists in the repo.
554
555
555 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
556 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
556 symbol is an ambiguous nodeid prefix.
557 symbol is an ambiguous nodeid prefix.
557 """
558 """
558 try:
559 try:
559 revsymbol(repo, symbol)
560 revsymbol(repo, symbol)
560 return True
561 return True
561 except error.RepoLookupError:
562 except error.RepoLookupError:
562 return False
563 return False
563
564
564 def revsymbol(repo, symbol):
565 def revsymbol(repo, symbol):
565 """Returns a context given a single revision symbol (as string).
566 """Returns a context given a single revision symbol (as string).
566
567
567 This is similar to revsingle(), but accepts only a single revision symbol,
568 This is similar to revsingle(), but accepts only a single revision symbol,
568 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
569 i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
569 not "max(public())".
570 not "max(public())".
570 """
571 """
571 if not isinstance(symbol, bytes):
572 if not isinstance(symbol, bytes):
572 msg = ("symbol (%s of type %s) was not a string, did you mean "
573 msg = ("symbol (%s of type %s) was not a string, did you mean "
573 "repo[symbol]?" % (symbol, type(symbol)))
574 "repo[symbol]?" % (symbol, type(symbol)))
574 raise error.ProgrammingError(msg)
575 raise error.ProgrammingError(msg)
575 try:
576 try:
576 if symbol in ('.', 'tip', 'null'):
577 if symbol in ('.', 'tip', 'null'):
577 return repo[symbol]
578 return repo[symbol]
578
579
579 try:
580 try:
580 r = int(symbol)
581 r = int(symbol)
581 if '%d' % r != symbol:
582 if '%d' % r != symbol:
582 raise ValueError
583 raise ValueError
583 l = len(repo.changelog)
584 l = len(repo.changelog)
584 if r < 0:
585 if r < 0:
585 r += l
586 r += l
586 if r < 0 or r >= l and r != wdirrev:
587 if r < 0 or r >= l and r != wdirrev:
587 raise ValueError
588 raise ValueError
588 return repo[r]
589 return repo[r]
589 except error.FilteredIndexError:
590 except error.FilteredIndexError:
590 raise
591 raise
591 except (ValueError, OverflowError, IndexError):
592 except (ValueError, OverflowError, IndexError):
592 pass
593 pass
593
594
594 if len(symbol) == 40:
595 if len(symbol) == 40:
595 try:
596 try:
596 node = bin(symbol)
597 node = bin(symbol)
597 rev = repo.changelog.rev(node)
598 rev = repo.changelog.rev(node)
598 return repo[rev]
599 return repo[rev]
599 except error.FilteredLookupError:
600 except error.FilteredLookupError:
600 raise
601 raise
601 except (TypeError, LookupError):
602 except (TypeError, LookupError):
602 pass
603 pass
603
604
604 # look up bookmarks through the name interface
605 # look up bookmarks through the name interface
605 try:
606 try:
606 node = repo.names.singlenode(repo, symbol)
607 node = repo.names.singlenode(repo, symbol)
607 rev = repo.changelog.rev(node)
608 rev = repo.changelog.rev(node)
608 return repo[rev]
609 return repo[rev]
609 except KeyError:
610 except KeyError:
610 pass
611 pass
611
612
612 node = resolvehexnodeidprefix(repo, symbol)
613 node = resolvehexnodeidprefix(repo, symbol)
613 if node is not None:
614 if node is not None:
614 rev = repo.changelog.rev(node)
615 rev = repo.changelog.rev(node)
615 return repo[rev]
616 return repo[rev]
616
617
617 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
618 raise error.RepoLookupError(_("unknown revision '%s'") % symbol)
618
619
619 except error.WdirUnsupported:
620 except error.WdirUnsupported:
620 return repo[None]
621 return repo[None]
621 except (error.FilteredIndexError, error.FilteredLookupError,
622 except (error.FilteredIndexError, error.FilteredLookupError,
622 error.FilteredRepoLookupError):
623 error.FilteredRepoLookupError):
623 raise _filterederror(repo, symbol)
624 raise _filterederror(repo, symbol)
624
625
625 def _filterederror(repo, changeid):
626 def _filterederror(repo, changeid):
626 """build an exception to be raised about a filtered changeid
627 """build an exception to be raised about a filtered changeid
627
628
628 This is extracted in a function to help extensions (eg: evolve) to
629 This is extracted in a function to help extensions (eg: evolve) to
629 experiment with various message variants."""
630 experiment with various message variants."""
630 if repo.filtername.startswith('visible'):
631 if repo.filtername.startswith('visible'):
631
632
632 # Check if the changeset is obsolete
633 # Check if the changeset is obsolete
633 unfilteredrepo = repo.unfiltered()
634 unfilteredrepo = repo.unfiltered()
634 ctx = revsymbol(unfilteredrepo, changeid)
635 ctx = revsymbol(unfilteredrepo, changeid)
635
636
636 # If the changeset is obsolete, enrich the message with the reason
637 # If the changeset is obsolete, enrich the message with the reason
637 # that made this changeset not visible
638 # that made this changeset not visible
638 if ctx.obsolete():
639 if ctx.obsolete():
639 msg = obsutil._getfilteredreason(repo, changeid, ctx)
640 msg = obsutil._getfilteredreason(repo, changeid, ctx)
640 else:
641 else:
641 msg = _("hidden revision '%s'") % changeid
642 msg = _("hidden revision '%s'") % changeid
642
643
643 hint = _('use --hidden to access hidden revisions')
644 hint = _('use --hidden to access hidden revisions')
644
645
645 return error.FilteredRepoLookupError(msg, hint=hint)
646 return error.FilteredRepoLookupError(msg, hint=hint)
646 msg = _("filtered revision '%s' (not in '%s' subset)")
647 msg = _("filtered revision '%s' (not in '%s' subset)")
647 msg %= (changeid, repo.filtername)
648 msg %= (changeid, repo.filtername)
648 return error.FilteredRepoLookupError(msg)
649 return error.FilteredRepoLookupError(msg)
649
650
def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute one or more revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('%d', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

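# Editor's illustrative sketch (not part of scmutil): how a caller might
# combine a pre-formatted revset with revrange(). The `repo` object and the
# revset strings below are assumptions for illustration only.
def _examplerevrangeusage(repo):
    # formatspec() escapes user-supplied arguments; bare ints are also
    # accepted by revrange() and treated as revision numbers
    spec = revsetlang.formatspec('ancestors(%s)', '.')
    revs = revrange(repo, [spec, 0])
    # the returned smartset behaves like a list of integer revisions
    return [repo[r] for r in revs]
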
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    elif repo.ui.configbool('ui', 'slash'):
        return lambda f: f
    else:
        return util.localpath

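# Editor's illustrative sketch: how command code typically obtains and
# applies a UI path function. The `repo` object and the file name passed in
# are assumptions for illustration only.
def _exampleuipathfnusage(repo):
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    # input is repo-relative; output is repo- or cwd-relative per config
    return uipathfn('dir/file.txt')
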
def subdiruipathfn(subpath, uipathfn):
    '''Create a new uipathfn that treats the file as relative to subpath.'''
    return lambda f: uipathfn(posixpath.join(subpath, f))

def anypats(pats, opts):
    '''Checks if any patterns, including --include and --exclude, were given.

    Some commands (e.g. addremove) use this condition for deciding whether to
    print absolute or relative paths.
    '''
    return bool(pats or opts.get('include') or opts.get('exclude'))

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always()

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(files, badfn=badfn)

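# Editor's illustrative sketch: the matcher helpers above all return callable
# matcher objects. The repo and file names here are assumptions.
def _examplematcherusage(repo):
    m = matchfiles(repo, ['README', 'setup.py'])
    assert m('README')          # exact matchers answer membership queries
    assert not m('other.txt')
    return m.files()            # the explicit file list that was matched
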
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name derived from `pat`, suitable for use in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def getorigvfs(ui, repo):
    """return a vfs suitable for saving 'orig' files

    return None if no special directory is configured"""
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return None
    return vfs.vfs(repo.wvfs.join(origbackuppath))

def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)

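# Editor's illustrative sketch: resolving a backup location. With, say,
# `[ui] origbackuppath = .hg/origbackups` in the hgrc the result lives under
# that root; the file name below is an assumption for illustration.
def _examplebackuppath(ui, repo):
    dest = backuppath(ui, repo, 'src/module.py')
    # with no origbackuppath configured this is src/module.py.orig in the
    # working directory
    return dest
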
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        mayusearchived = repo.ui.config('experimental', 'cleanup-as-archived')
        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        elif phases.supportinternal(repo) and mayusearchived:
            # this assumes we do not have "unstable" nodes above the cleaned
            # ones
            allreplaced = set()
            for ns in replacements.keys():
                allreplaced.update(ns)
            if backup:
                from . import repair # avoid import cycle
                node = min(allreplaced, key=repo.changelog.rev)
                repair.backupbundle(repo, allreplaced, allreplaced, node,
                                    operation)
            phases.retractboundary(repo, tr, phases.archived, allreplaced)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

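# Editor's illustrative sketch: how a history-rewriting command might drive
# cleanupnodes(). `oldnode` and `newnode` are assumed to be binary changeset
# ids produced by the caller; the operation name is illustrative.
def _examplecleanup(repo, oldnode, newnode):
    # map each rewritten node to its successor(s); an empty list would mean
    # "pruned without successor"
    replacements = {oldnode: [newnode]}
    cleanupnodes(repo, replacements, 'example-rewrite', fixphase=True)
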
def addremove(repo, matcher, prefix, uipathfn, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            subuipathfn = subdiruipathfn(subpath, uipathfn)
            try:
                if sub.addremove(submatch, subprefix, subuipathfn, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % uipathfn(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % uipathfn(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % uipathfn(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    # TODO: We should probably have the caller pass in uipathfn and apply it to
    # the messages above too. legacyrelativevalue=True is consistent with how
    # it used to work.
    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity, uipathfn)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (uipathfn(old), uipathfn(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def getrenamedfn(repo, endrev=None):
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed and renamed[0]
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].copysource()
        except error.LookupError:
            return None

    return getrenamed

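# Editor's illustrative sketch: templater-style usage of the cached rename
# lookup. The file name and revision below are assumptions.
def _examplegetrenamed(repo):
    getrenamed = getrenamedfn(repo)
    # returns the copy source of 'b.txt' at revision 5, or a false value if
    # it was not renamed there
    return getrenamed('b.txt', 5)
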
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def movedirstate(repo, newctx, match=None):
    """Move the dirstate to newctx and adjust it as necessary."""
    oldctx = repo['.']
    ds = repo.dirstate
    ds.setparents(newctx.node(), nullid)
    copies = dict(ds.copies())
    s = newctx.status(oldctx, match=match)
    for f in s.modified:
        if ds[f] == 'r':
            # modified + removed -> removed
            continue
        ds.normallookup(f)

    for f in s.added:
        if ds[f] == 'r':
            # added + removed -> unknown
            ds.drop(f)
        elif ds[f] != 'a':
            ds.add(f)

    for f in s.removed:
        if ds[f] == 'a':
            # removed + added -> normal
            ds.normallookup(f)
        elif ds[f] != 'r':
            ds.remove(f)

    # Merge old parent and old working dir copies
    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
    oldcopies.update(copies)
    copies = dict((dst, oldcopies.get(src, src))
                  for dst, src in oldcopies.iteritems())
    # Adjust the dirstate copies
    for dst, src in copies.iteritems():
        if (src not in newctx or dst in newctx or ds[dst] != 'a'):
            src = None
        ds.copy(src, dst)

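# Editor's illustrative, hedged sketch: how a rewriting command such as
# uncommit might call movedirstate() after creating `newctx`. The locking
# shown reflects an assumption about caller responsibilities.
def _examplemovedirstate(repo, newctx):
    with repo.wlock():
        # reparent the working copy onto newctx while preserving pending
        # changes and copy records
        movedirstate(repo, newctx)
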
def writerequires(opener, requirements):
    with opener('requires', 'w', atomictemp=True) as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property-like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used, as it is stored in the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose method was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

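# Editor's illustrative sketch: a minimal subclass wiring join() to a vfs,
# in the spirit of how repository classes use this decorator. The class name,
# the attribute, and parsebookmarks() are assumptions, not real API.
class _examplefilecache(filecache):
    def join(self, obj, fname):
        # resolve cached file names relative to the object's vfs
        return obj.vfs.join(fname)

# Hypothetical usage on a repo-like object (commented out because
# parsebookmarks() is invented for illustration):
#
#   class repolike(object):
#       @_examplefilecache('bookmarks')
#       def bookmarks(self):
#           return parsebookmarks(self.vfs('bookmarks'))
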
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

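# Editor's illustrative sketch: a hypothetical hgrc entry backing an extdata
# source named "bugzilla", and how it would be consumed. The source name and
# command are assumptions for illustration.
#
#   [extdata]
#   bugzilla = shell: cat .hg/bugdata.txt
#
def _exampleextdata(repo):
    # each line of the command output is "<revspec> <freeform value>"
    return extdatasource(repo, 'bugzilla')
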
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

1485 def wlocksub(repo, cmd, *args, **kwargs):
1524 def wlocksub(repo, cmd, *args, **kwargs):
1486 """run cmd as a subprocess that allows inheriting repo's wlock
1525 """run cmd as a subprocess that allows inheriting repo's wlock
1487
1526
1488 This can only be called while the wlock is held. This takes all the
1527 This can only be called while the wlock is held. This takes all the
1489 arguments that ui.system does, and returns the exit code of the
1528 arguments that ui.system does, and returns the exit code of the
1490 subprocess."""
1529 subprocess."""
1491 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1530 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1492 **kwargs)
1531 **kwargs)
1493
1532
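# Illustrative sketch (assumed usage, not from the original module): while
# the working-directory lock is held, a subprocess started via wlocksub()
# can inherit it through the HG_WLOCK_LOCKER environment variable:
#
#   with repo.wlock():
#       rc = wlocksub(repo, 'hg status')  # child may inherit the wlock
#       if rc != 0:
#           repo.ui.warn('subcommand failed\n')
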
class progress(object):
    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # default 'unit' so it is defined even when self.unit is empty
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))

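# Illustrative sketch (assumed usage): 'updatebar' is any callable with the
# signature (topic, pos, item, unit, total), as invoked above. Used as a
# context manager, the bar is completed automatically on exit:
#
#   with progress(ui, updatebar, 'files', unit='files', total=len(files)) as p:
#       for f in files:
#           p.increment(item=f)
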
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines, which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

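# Illustrative round trip (assumed usage): writing a mapping with a raw first
# line and reading it back exposes that line under the '__firstline' key:
#
#   f = simplekeyvaluefile(repo.vfs, 'mystate')
#   f.write({'version': '1', 'state': 'done'}, firstline='v1')
#   d = f.read(firstlinenonkeyval=True)
#   # d == {'__firstline': 'v1', 'version': '1', 'state': 'done'}
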
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

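# Illustrative sketch (assumed extension code): an extension registers a
# prefetch function on the util.hooks container above; prefetchfiles() then
# calls it with (repo, revs, match):
#
#   def _myprefetch(repo, revs, match):
#       # fetch the matched files for 'revs' from a remote store here
#       pass
#   fileprefetchhooks.add('myextension', _myprefetch)
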
# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible;
                # we call them "extinct" internally but the term has not
                # been exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

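# Illustrative sketch (assumed usage): the repository layer would register
# the summary callbacks when it opens a transaction, so the reports run
# after the transaction closes:
#
#   tr = repo.transaction('pull')
#   registersummarycallback(repo, tr, txnname='pull')
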
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

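# Illustrative behaviour (values assumed): with six nodes and the default
# maxnumnodes=4, the summary is truncated unless --verbose is in effect:
#
#   nodesummaries(repo, nodes)  # -> '<4 short hashes> and 2 others'
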
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branches
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate the branch/tags caches
    # until we can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

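# Illustrative sketch (assumed usage; hash hypothetical): command code can
# swap in the returned repo before resolving user-supplied revisions, so a
# hidden changeset named by hash becomes accessible, with a warning:
#
#   repo = unhidehashlikerevs(repo, ['ff2c9fa2018b'], 'warn')
#   ctx = repo['ff2c9fa2018b']  # now resolvable even if hidden
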
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and return a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
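
# Illustrative equivalent (bookmark name assumed): for a bookmark 'feature',
# the revset above corresponds roughly to the command line:
#
#   hg log -r 'ancestors(bookmark(feature))
#              - ancestors(head() and not bookmark(feature))
#              - ancestors(bookmark() and not bookmark(feature))'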