##// END OF EJS Templates
revsymbol: stop delegating to repo.__getitem__ for unhandled symbols (API)...
Martin von Zweigbergk -
r37549:6639ac97 default
parent child Browse files
Show More
@@ -1,1543 +1,1543 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 revsetlang,
38 revsetlang,
39 similar,
39 similar,
40 url,
40 url,
41 util,
41 util,
42 vfs,
42 vfs,
43 )
43 )
44
44
45 from .utils import (
45 from .utils import (
46 procutil,
46 procutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 if pycompat.iswindows:
50 if pycompat.iswindows:
51 from . import scmwindows as scmplatform
51 from . import scmwindows as scmplatform
52 else:
52 else:
53 from . import scmposix as scmplatform
53 from . import scmposix as scmplatform
54
54
55 termsize = scmplatform.termsize
55 termsize = scmplatform.termsize
56
56
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Store the seven lists positionally; the properties below give
        # them readable names.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
109
109
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # ctx1.  Subpaths only in ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    # Keys are unique, so sorting by key gives the same order as sorting
    # the (key, value) pairs.
    for subpath in sorted(subpaths):
        yield subpath, subpaths[subpath].sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
134
134
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # Collect excluded nodes that are secret and not extinct; those are
    # reported separately so the user knows why nothing moved.
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
151
151
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Record the traceback (shown when tracebacks are enabled)
            # before the outer handlers turn the exception into a message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or stringutil.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % stringutil.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # Unlike the other handlers, this one signals "user action needed"
        # rather than failure.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # Suggest likely causes based on the missing module's name.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # NOTE(review): the "code"/"reason" attribute checks presumably
        # distinguish urllib HTTP/URL errors — confirm before relying on it.
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe on output is silently ignored.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
268
268
def checknewlabel(repo, lbl, kind):
    """Abort if 'lbl' is not usable as a new label.

    Do not use the "kind" parameter in ui output; it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
285
285
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
290
290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302
302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    # On Windows, non-portable names always abort regardless of config.
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    recognized = warn or abort or lval == 'ignore'
    if bval is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
315
315
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto a tracked one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked filename in a single encoding.lower()
        # call over a NUL-joined string (NUL cannot appear in filenames).
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
339
339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    digest = hashlib.sha1()
    for rev in revs:
        digest.update('%d;' % rev)
    return digest.digest()
363
363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; errors deeper in
        # the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    # samestat compares two stat results for identity; it is not available
    # on every platform, and without it symlinks cannot be followed safely.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Append dirname's stat to dirlst if not already present.
            # Returns True when the directory was new (i.e. no cycle).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                # adddir returns False for already-seen directories, which
                # prevents symlink cycles from recursing forever.
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Recurse through the link, sharing seen_dirs so
                        # cycles across links are still detected.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
407
407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no node; substitute wdirid.
    node = ctx.node()
    return wdirid if node is None else node
414
414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has rev None; substitute wdirrev.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
422
422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
428
428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug mode shows the full hash, normal mode the short form.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436
436
def resolvepartialhexnodeid(repo, prefix):
    """Resolve a hex nodeid prefix to a full node, or None if no match.

    Uses the unfiltered repo because it's faster when the prefix is
    ambiguous.  This matches the "shortest" template function.
    """
    node = repo.unfiltered().changelog._partialmatch(prefix)
    if node is None:
        return None
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
445
445
def isrevsymbol(repo, symbol):
    """Report whether revsymbol() can resolve 'symbol' in 'repo'."""
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
452
452
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # Special names handled directly by the repo.
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # A decimal integer is treated as a revision number, with negative
        # values counted from the end of the changelog.
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                # e.g. "0x10" or "010": not a plain decimal spelling.
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # A filtered revision must surface as such, not fall through.
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # A 40-character string may be a full hex node id.
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # Finally, try the symbol as a unique hex node id prefix.
        node = repo.unfiltered().changelog._partialmatch(symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        # Nothing matched; do not delegate to repo.__getitem__.
        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        # Rewrap filtered lookups with a user-facing explanation.
        raise _filterederror(repo, symbol)
513
513
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden.  If it is obsolete, enrich the message with
    # the reason that made this changeset not visible.
    unfilteredrepo = repo.unfiltered()
    ctx = revsymbol(unfilteredrepo, changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
538
538
539 def revsingle(repo, revspec, default='.', localalias=None):
539 def revsingle(repo, revspec, default='.', localalias=None):
540 if not revspec and revspec != 0:
540 if not revspec and revspec != 0:
541 return repo[default]
541 return repo[default]
542
542
543 l = revrange(repo, [revspec], localalias=localalias)
543 l = revrange(repo, [revspec], localalias=localalias)
544 if not l:
544 if not l:
545 raise error.Abort(_('empty revision set'))
545 raise error.Abort(_('empty revision set'))
546 return repo[l.last()]
546 return repo[l.last()]
547
547
548 def _pairspec(revspec):
548 def _pairspec(revspec):
549 tree = revsetlang.parse(revspec)
549 tree = revsetlang.parse(revspec)
550 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
550 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
551
551
552 def revpairnodes(repo, revs):
552 def revpairnodes(repo, revs):
553 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
553 repo.ui.deprecwarn("revpairnodes is deprecated, please use revpair", "4.6")
554 ctx1, ctx2 = revpair(repo, revs)
554 ctx1, ctx2 = revpair(repo, revs)
555 return ctx1.node(), ctx2.node()
555 return ctx1.node(), ctx2.node()
556
556
557 def revpair(repo, revs):
557 def revpair(repo, revs):
558 if not revs:
558 if not revs:
559 return repo['.'], repo[None]
559 return repo['.'], repo[None]
560
560
561 l = revrange(repo, revs)
561 l = revrange(repo, revs)
562
562
563 if not l:
563 if not l:
564 first = second = None
564 first = second = None
565 elif l.isascending():
565 elif l.isascending():
566 first = l.min()
566 first = l.min()
567 second = l.max()
567 second = l.max()
568 elif l.isdescending():
568 elif l.isdescending():
569 first = l.max()
569 first = l.max()
570 second = l.min()
570 second = l.min()
571 else:
571 else:
572 first = l.first()
572 first = l.first()
573 second = l.last()
573 second = l.last()
574
574
575 if first is None:
575 if first is None:
576 raise error.Abort(_('empty revision range'))
576 raise error.Abort(_('empty revision range'))
577 if (first == second and len(revs) >= 2
577 if (first == second and len(revs) >= 2
578 and not all(revrange(repo, [r]) for r in revs)):
578 and not all(revrange(repo, [r]) for r in revs)):
579 raise error.Abort(_('empty revision on one side of range'))
579 raise error.Abort(_('empty revision on one side of range'))
580
580
581 # if top-level is range expression, the result must always be a pair
581 # if top-level is range expression, the result must always be a pair
582 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
582 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
583 return repo[first], repo[None]
583 return repo[first], repo[None]
584
584
585 return repo[first], repo[second]
585 return repo[first], repo[second]
586
586
587 def revrange(repo, specs, localalias=None):
587 def revrange(repo, specs, localalias=None):
588 """Execute 1 to many revsets and return the union.
588 """Execute 1 to many revsets and return the union.
589
589
590 This is the preferred mechanism for executing revsets using user-specified
590 This is the preferred mechanism for executing revsets using user-specified
591 config options, such as revset aliases.
591 config options, such as revset aliases.
592
592
593 The revsets specified by ``specs`` will be executed via a chained ``OR``
593 The revsets specified by ``specs`` will be executed via a chained ``OR``
594 expression. If ``specs`` is empty, an empty result is returned.
594 expression. If ``specs`` is empty, an empty result is returned.
595
595
596 ``specs`` can contain integers, in which case they are assumed to be
596 ``specs`` can contain integers, in which case they are assumed to be
597 revision numbers.
597 revision numbers.
598
598
599 It is assumed the revsets are already formatted. If you have arguments
599 It is assumed the revsets are already formatted. If you have arguments
600 that need to be expanded in the revset, call ``revsetlang.formatspec()``
600 that need to be expanded in the revset, call ``revsetlang.formatspec()``
601 and pass the result as an element of ``specs``.
601 and pass the result as an element of ``specs``.
602
602
603 Specifying a single revset is allowed.
603 Specifying a single revset is allowed.
604
604
605 Returns a ``revset.abstractsmartset`` which is a list-like interface over
605 Returns a ``revset.abstractsmartset`` which is a list-like interface over
606 integer revisions.
606 integer revisions.
607 """
607 """
608 allspecs = []
608 allspecs = []
609 for spec in specs:
609 for spec in specs:
610 if isinstance(spec, int):
610 if isinstance(spec, int):
611 spec = revsetlang.formatspec('rev(%d)', spec)
611 spec = revsetlang.formatspec('rev(%d)', spec)
612 allspecs.append(spec)
612 allspecs.append(spec)
613 return repo.anyrevs(allspecs, user=True, localalias=localalias)
613 return repo.anyrevs(allspecs, user=True, localalias=localalias)
614
614
615 def meaningfulparents(repo, ctx):
615 def meaningfulparents(repo, ctx):
616 """Return list of meaningful (or all if debug) parentrevs for rev.
616 """Return list of meaningful (or all if debug) parentrevs for rev.
617
617
618 For merges (two non-nullrev revisions) both parents are meaningful.
618 For merges (two non-nullrev revisions) both parents are meaningful.
619 Otherwise the first parent revision is considered meaningful if it
619 Otherwise the first parent revision is considered meaningful if it
620 is not the preceding revision.
620 is not the preceding revision.
621 """
621 """
622 parents = ctx.parents()
622 parents = ctx.parents()
623 if len(parents) > 1:
623 if len(parents) > 1:
624 return parents
624 return parents
625 if repo.ui.debugflag:
625 if repo.ui.debugflag:
626 return [parents[0], repo['null']]
626 return [parents[0], repo['null']]
627 if parents[0].rev() >= intrev(ctx) - 1:
627 if parents[0].rev() >= intrev(ctx) - 1:
628 return []
628 return []
629 return parents
629 return parents
630
630
631 def expandpats(pats):
631 def expandpats(pats):
632 '''Expand bare globs when running on windows.
632 '''Expand bare globs when running on windows.
633 On posix we assume it already has already been done by sh.'''
633 On posix we assume it already has already been done by sh.'''
634 if not util.expandglobs:
634 if not util.expandglobs:
635 return list(pats)
635 return list(pats)
636 ret = []
636 ret = []
637 for kindpat in pats:
637 for kindpat in pats:
638 kind, pat = matchmod._patsplit(kindpat, None)
638 kind, pat = matchmod._patsplit(kindpat, None)
639 if kind is None:
639 if kind is None:
640 try:
640 try:
641 globbed = glob.glob(pat)
641 globbed = glob.glob(pat)
642 except re.error:
642 except re.error:
643 globbed = [pat]
643 globbed = [pat]
644 if globbed:
644 if globbed:
645 ret.extend(globbed)
645 ret.extend(globbed)
646 continue
646 continue
647 ret.append(kindpat)
647 ret.append(kindpat)
648 return ret
648 return ret
649
649
650 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
650 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
651 badfn=None):
651 badfn=None):
652 '''Return a matcher and the patterns that were used.
652 '''Return a matcher and the patterns that were used.
653 The matcher will warn about bad matches, unless an alternate badfn callback
653 The matcher will warn about bad matches, unless an alternate badfn callback
654 is provided.'''
654 is provided.'''
655 if pats == ("",):
655 if pats == ("",):
656 pats = []
656 pats = []
657 if opts is None:
657 if opts is None:
658 opts = {}
658 opts = {}
659 if not globbed and default == 'relpath':
659 if not globbed and default == 'relpath':
660 pats = expandpats(pats or [])
660 pats = expandpats(pats or [])
661
661
662 def bad(f, msg):
662 def bad(f, msg):
663 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
663 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
664
664
665 if badfn is None:
665 if badfn is None:
666 badfn = bad
666 badfn = bad
667
667
668 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
668 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
669 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
669 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
670
670
671 if m.always():
671 if m.always():
672 pats = []
672 pats = []
673 return m, pats
673 return m, pats
674
674
675 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
675 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
676 badfn=None):
676 badfn=None):
677 '''Return a matcher that will warn about bad matches.'''
677 '''Return a matcher that will warn about bad matches.'''
678 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
678 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
679
679
680 def matchall(repo):
680 def matchall(repo):
681 '''Return a matcher that will efficiently match everything.'''
681 '''Return a matcher that will efficiently match everything.'''
682 return matchmod.always(repo.root, repo.getcwd())
682 return matchmod.always(repo.root, repo.getcwd())
683
683
684 def matchfiles(repo, files, badfn=None):
684 def matchfiles(repo, files, badfn=None):
685 '''Return a matcher that will efficiently match exactly these files.'''
685 '''Return a matcher that will efficiently match exactly these files.'''
686 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
686 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
687
687
688 def parsefollowlinespattern(repo, rev, pat, msg):
688 def parsefollowlinespattern(repo, rev, pat, msg):
689 """Return a file name from `pat` pattern suitable for usage in followlines
689 """Return a file name from `pat` pattern suitable for usage in followlines
690 logic.
690 logic.
691 """
691 """
692 if not matchmod.patkind(pat):
692 if not matchmod.patkind(pat):
693 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
693 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
694 else:
694 else:
695 ctx = repo[rev]
695 ctx = repo[rev]
696 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
696 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
697 files = [f for f in ctx if m(f)]
697 files = [f for f in ctx if m(f)]
698 if len(files) != 1:
698 if len(files) != 1:
699 raise error.ParseError(msg)
699 raise error.ParseError(msg)
700 return files[0]
700 return files[0]
701
701
702 def origpath(ui, repo, filepath):
702 def origpath(ui, repo, filepath):
703 '''customize where .orig files are created
703 '''customize where .orig files are created
704
704
705 Fetch user defined path from config file: [ui] origbackuppath = <path>
705 Fetch user defined path from config file: [ui] origbackuppath = <path>
706 Fall back to default (filepath with .orig suffix) if not specified
706 Fall back to default (filepath with .orig suffix) if not specified
707 '''
707 '''
708 origbackuppath = ui.config('ui', 'origbackuppath')
708 origbackuppath = ui.config('ui', 'origbackuppath')
709 if not origbackuppath:
709 if not origbackuppath:
710 return filepath + ".orig"
710 return filepath + ".orig"
711
711
712 # Convert filepath from an absolute path into a path inside the repo.
712 # Convert filepath from an absolute path into a path inside the repo.
713 filepathfromroot = util.normpath(os.path.relpath(filepath,
713 filepathfromroot = util.normpath(os.path.relpath(filepath,
714 start=repo.root))
714 start=repo.root))
715
715
716 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
716 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
717 origbackupdir = origvfs.dirname(filepathfromroot)
717 origbackupdir = origvfs.dirname(filepathfromroot)
718 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
718 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
719 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
719 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
720
720
721 # Remove any files that conflict with the backup file's path
721 # Remove any files that conflict with the backup file's path
722 for f in reversed(list(util.finddirs(filepathfromroot))):
722 for f in reversed(list(util.finddirs(filepathfromroot))):
723 if origvfs.isfileorlink(f):
723 if origvfs.isfileorlink(f):
724 ui.note(_('removing conflicting file: %s\n')
724 ui.note(_('removing conflicting file: %s\n')
725 % origvfs.join(f))
725 % origvfs.join(f))
726 origvfs.unlink(f)
726 origvfs.unlink(f)
727 break
727 break
728
728
729 origvfs.makedirs(origbackupdir)
729 origvfs.makedirs(origbackupdir)
730
730
731 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
731 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
732 ui.note(_('removing conflicting directory: %s\n')
732 ui.note(_('removing conflicting directory: %s\n')
733 % origvfs.join(filepathfromroot))
733 % origvfs.join(filepathfromroot))
734 origvfs.rmtree(filepathfromroot, forcibly=True)
734 origvfs.rmtree(filepathfromroot, forcibly=True)
735
735
736 return origvfs.join(filepathfromroot)
736 return origvfs.join(filepathfromroot)
737
737
738 class _containsnode(object):
738 class _containsnode(object):
739 """proxy __contains__(node) to container.__contains__ which accepts revs"""
739 """proxy __contains__(node) to container.__contains__ which accepts revs"""
740
740
741 def __init__(self, repo, revcontainer):
741 def __init__(self, repo, revcontainer):
742 self._torev = repo.changelog.rev
742 self._torev = repo.changelog.rev
743 self._revcontains = revcontainer.__contains__
743 self._revcontains = revcontainer.__contains__
744
744
745 def __contains__(self, node):
745 def __contains__(self, node):
746 return self._revcontains(self._torev(node))
746 return self._revcontains(self._torev(node))
747
747
748 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
748 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
749 """do common cleanups when old nodes are replaced by new nodes
749 """do common cleanups when old nodes are replaced by new nodes
750
750
751 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
751 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
752 (we might also want to move working directory parent in the future)
752 (we might also want to move working directory parent in the future)
753
753
754 By default, bookmark moves are calculated automatically from 'replacements',
754 By default, bookmark moves are calculated automatically from 'replacements',
755 but 'moves' can be used to override that. Also, 'moves' may include
755 but 'moves' can be used to override that. Also, 'moves' may include
756 additional bookmark moves that should not have associated obsmarkers.
756 additional bookmark moves that should not have associated obsmarkers.
757
757
758 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
758 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
759 have replacements. operation is a string, like "rebase".
759 have replacements. operation is a string, like "rebase".
760
760
761 metadata is dictionary containing metadata to be stored in obsmarker if
761 metadata is dictionary containing metadata to be stored in obsmarker if
762 obsolescence is enabled.
762 obsolescence is enabled.
763 """
763 """
764 if not replacements and not moves:
764 if not replacements and not moves:
765 return
765 return
766
766
767 # translate mapping's other forms
767 # translate mapping's other forms
768 if not util.safehasattr(replacements, 'items'):
768 if not util.safehasattr(replacements, 'items'):
769 replacements = {n: () for n in replacements}
769 replacements = {n: () for n in replacements}
770
770
771 # Calculate bookmark movements
771 # Calculate bookmark movements
772 if moves is None:
772 if moves is None:
773 moves = {}
773 moves = {}
774 # Unfiltered repo is needed since nodes in replacements might be hidden.
774 # Unfiltered repo is needed since nodes in replacements might be hidden.
775 unfi = repo.unfiltered()
775 unfi = repo.unfiltered()
776 for oldnode, newnodes in replacements.items():
776 for oldnode, newnodes in replacements.items():
777 if oldnode in moves:
777 if oldnode in moves:
778 continue
778 continue
779 if len(newnodes) > 1:
779 if len(newnodes) > 1:
780 # usually a split, take the one with biggest rev number
780 # usually a split, take the one with biggest rev number
781 newnode = next(unfi.set('max(%ln)', newnodes)).node()
781 newnode = next(unfi.set('max(%ln)', newnodes)).node()
782 elif len(newnodes) == 0:
782 elif len(newnodes) == 0:
783 # move bookmark backwards
783 # move bookmark backwards
784 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
784 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
785 list(replacements)))
785 list(replacements)))
786 if roots:
786 if roots:
787 newnode = roots[0].node()
787 newnode = roots[0].node()
788 else:
788 else:
789 newnode = nullid
789 newnode = nullid
790 else:
790 else:
791 newnode = newnodes[0]
791 newnode = newnodes[0]
792 moves[oldnode] = newnode
792 moves[oldnode] = newnode
793
793
794 with repo.transaction('cleanup') as tr:
794 with repo.transaction('cleanup') as tr:
795 # Move bookmarks
795 # Move bookmarks
796 bmarks = repo._bookmarks
796 bmarks = repo._bookmarks
797 bmarkchanges = []
797 bmarkchanges = []
798 allnewnodes = [n for ns in replacements.values() for n in ns]
798 allnewnodes = [n for ns in replacements.values() for n in ns]
799 for oldnode, newnode in moves.items():
799 for oldnode, newnode in moves.items():
800 oldbmarks = repo.nodebookmarks(oldnode)
800 oldbmarks = repo.nodebookmarks(oldnode)
801 if not oldbmarks:
801 if not oldbmarks:
802 continue
802 continue
803 from . import bookmarks # avoid import cycle
803 from . import bookmarks # avoid import cycle
804 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
804 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
805 (util.rapply(pycompat.maybebytestr, oldbmarks),
805 (util.rapply(pycompat.maybebytestr, oldbmarks),
806 hex(oldnode), hex(newnode)))
806 hex(oldnode), hex(newnode)))
807 # Delete divergent bookmarks being parents of related newnodes
807 # Delete divergent bookmarks being parents of related newnodes
808 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
808 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
809 allnewnodes, newnode, oldnode)
809 allnewnodes, newnode, oldnode)
810 deletenodes = _containsnode(repo, deleterevs)
810 deletenodes = _containsnode(repo, deleterevs)
811 for name in oldbmarks:
811 for name in oldbmarks:
812 bmarkchanges.append((name, newnode))
812 bmarkchanges.append((name, newnode))
813 for b in bookmarks.divergent2delete(repo, deletenodes, name):
813 for b in bookmarks.divergent2delete(repo, deletenodes, name):
814 bmarkchanges.append((b, None))
814 bmarkchanges.append((b, None))
815
815
816 if bmarkchanges:
816 if bmarkchanges:
817 bmarks.applychanges(repo, tr, bmarkchanges)
817 bmarks.applychanges(repo, tr, bmarkchanges)
818
818
819 # Obsolete or strip nodes
819 # Obsolete or strip nodes
820 if obsolete.isenabled(repo, obsolete.createmarkersopt):
820 if obsolete.isenabled(repo, obsolete.createmarkersopt):
821 # If a node is already obsoleted, and we want to obsolete it
821 # If a node is already obsoleted, and we want to obsolete it
822 # without a successor, skip that obssolete request since it's
822 # without a successor, skip that obssolete request since it's
823 # unnecessary. That's the "if s or not isobs(n)" check below.
823 # unnecessary. That's the "if s or not isobs(n)" check below.
824 # Also sort the node in topology order, that might be useful for
824 # Also sort the node in topology order, that might be useful for
825 # some obsstore logic.
825 # some obsstore logic.
826 # NOTE: the filtering and sorting might belong to createmarkers.
826 # NOTE: the filtering and sorting might belong to createmarkers.
827 isobs = unfi.obsstore.successors.__contains__
827 isobs = unfi.obsstore.successors.__contains__
828 torev = unfi.changelog.rev
828 torev = unfi.changelog.rev
829 sortfunc = lambda ns: torev(ns[0])
829 sortfunc = lambda ns: torev(ns[0])
830 rels = [(unfi[n], tuple(unfi[m] for m in s))
830 rels = [(unfi[n], tuple(unfi[m] for m in s))
831 for n, s in sorted(replacements.items(), key=sortfunc)
831 for n, s in sorted(replacements.items(), key=sortfunc)
832 if s or not isobs(n)]
832 if s or not isobs(n)]
833 if rels:
833 if rels:
834 obsolete.createmarkers(repo, rels, operation=operation,
834 obsolete.createmarkers(repo, rels, operation=operation,
835 metadata=metadata)
835 metadata=metadata)
836 else:
836 else:
837 from . import repair # avoid import cycle
837 from . import repair # avoid import cycle
838 tostrip = list(replacements)
838 tostrip = list(replacements)
839 if tostrip:
839 if tostrip:
840 repair.delayedstrip(repo.ui, repo, tostrip, operation)
840 repair.delayedstrip(repo.ui, repo, tostrip, operation)
841
841
842 def addremove(repo, matcher, prefix, opts=None):
842 def addremove(repo, matcher, prefix, opts=None):
843 if opts is None:
843 if opts is None:
844 opts = {}
844 opts = {}
845 m = matcher
845 m = matcher
846 dry_run = opts.get('dry_run')
846 dry_run = opts.get('dry_run')
847 try:
847 try:
848 similarity = float(opts.get('similarity') or 0)
848 similarity = float(opts.get('similarity') or 0)
849 except ValueError:
849 except ValueError:
850 raise error.Abort(_('similarity must be a number'))
850 raise error.Abort(_('similarity must be a number'))
851 if similarity < 0 or similarity > 100:
851 if similarity < 0 or similarity > 100:
852 raise error.Abort(_('similarity must be between 0 and 100'))
852 raise error.Abort(_('similarity must be between 0 and 100'))
853 similarity /= 100.0
853 similarity /= 100.0
854
854
855 ret = 0
855 ret = 0
856 join = lambda f: os.path.join(prefix, f)
856 join = lambda f: os.path.join(prefix, f)
857
857
858 wctx = repo[None]
858 wctx = repo[None]
859 for subpath in sorted(wctx.substate):
859 for subpath in sorted(wctx.substate):
860 submatch = matchmod.subdirmatcher(subpath, m)
860 submatch = matchmod.subdirmatcher(subpath, m)
861 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
861 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
862 sub = wctx.sub(subpath)
862 sub = wctx.sub(subpath)
863 try:
863 try:
864 if sub.addremove(submatch, prefix, opts):
864 if sub.addremove(submatch, prefix, opts):
865 ret = 1
865 ret = 1
866 except error.LookupError:
866 except error.LookupError:
867 repo.ui.status(_("skipping missing subrepository: %s\n")
867 repo.ui.status(_("skipping missing subrepository: %s\n")
868 % join(subpath))
868 % join(subpath))
869
869
870 rejected = []
870 rejected = []
871 def badfn(f, msg):
871 def badfn(f, msg):
872 if f in m.files():
872 if f in m.files():
873 m.bad(f, msg)
873 m.bad(f, msg)
874 rejected.append(f)
874 rejected.append(f)
875
875
876 badmatch = matchmod.badmatch(m, badfn)
876 badmatch = matchmod.badmatch(m, badfn)
877 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
877 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
878 badmatch)
878 badmatch)
879
879
880 unknownset = set(unknown + forgotten)
880 unknownset = set(unknown + forgotten)
881 toprint = unknownset.copy()
881 toprint = unknownset.copy()
882 toprint.update(deleted)
882 toprint.update(deleted)
883 for abs in sorted(toprint):
883 for abs in sorted(toprint):
884 if repo.ui.verbose or not m.exact(abs):
884 if repo.ui.verbose or not m.exact(abs):
885 if abs in unknownset:
885 if abs in unknownset:
886 status = _('adding %s\n') % m.uipath(abs)
886 status = _('adding %s\n') % m.uipath(abs)
887 else:
887 else:
888 status = _('removing %s\n') % m.uipath(abs)
888 status = _('removing %s\n') % m.uipath(abs)
889 repo.ui.status(status)
889 repo.ui.status(status)
890
890
891 renames = _findrenames(repo, m, added + unknown, removed + deleted,
891 renames = _findrenames(repo, m, added + unknown, removed + deleted,
892 similarity)
892 similarity)
893
893
894 if not dry_run:
894 if not dry_run:
895 _markchanges(repo, unknown + forgotten, deleted, renames)
895 _markchanges(repo, unknown + forgotten, deleted, renames)
896
896
897 for f in rejected:
897 for f in rejected:
898 if f in m.files():
898 if f in m.files():
899 return 1
899 return 1
900 return ret
900 return ret
901
901
902 def marktouched(repo, files, similarity=0.0):
902 def marktouched(repo, files, similarity=0.0):
903 '''Assert that files have somehow been operated upon. files are relative to
903 '''Assert that files have somehow been operated upon. files are relative to
904 the repo root.'''
904 the repo root.'''
905 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
905 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
906 rejected = []
906 rejected = []
907
907
908 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
908 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
909
909
910 if repo.ui.verbose:
910 if repo.ui.verbose:
911 unknownset = set(unknown + forgotten)
911 unknownset = set(unknown + forgotten)
912 toprint = unknownset.copy()
912 toprint = unknownset.copy()
913 toprint.update(deleted)
913 toprint.update(deleted)
914 for abs in sorted(toprint):
914 for abs in sorted(toprint):
915 if abs in unknownset:
915 if abs in unknownset:
916 status = _('adding %s\n') % abs
916 status = _('adding %s\n') % abs
917 else:
917 else:
918 status = _('removing %s\n') % abs
918 status = _('removing %s\n') % abs
919 repo.ui.status(status)
919 repo.ui.status(status)
920
920
921 renames = _findrenames(repo, m, added + unknown, removed + deleted,
921 renames = _findrenames(repo, m, added + unknown, removed + deleted,
922 similarity)
922 similarity)
923
923
924 _markchanges(repo, unknown + forgotten, deleted, renames)
924 _markchanges(repo, unknown + forgotten, deleted, renames)
925
925
926 for f in rejected:
926 for f in rejected:
927 if f in m.files():
927 if f in m.files():
928 return 1
928 return 1
929 return 0
929 return 0
930
930
931 def _interestingfiles(repo, matcher):
931 def _interestingfiles(repo, matcher):
932 '''Walk dirstate with matcher, looking for files that addremove would care
932 '''Walk dirstate with matcher, looking for files that addremove would care
933 about.
933 about.
934
934
935 This is different from dirstate.status because it doesn't care about
935 This is different from dirstate.status because it doesn't care about
936 whether files are modified or clean.'''
936 whether files are modified or clean.'''
937 added, unknown, deleted, removed, forgotten = [], [], [], [], []
937 added, unknown, deleted, removed, forgotten = [], [], [], [], []
938 audit_path = pathutil.pathauditor(repo.root, cached=True)
938 audit_path = pathutil.pathauditor(repo.root, cached=True)
939
939
940 ctx = repo[None]
940 ctx = repo[None]
941 dirstate = repo.dirstate
941 dirstate = repo.dirstate
942 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
942 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
943 unknown=True, ignored=False, full=False)
943 unknown=True, ignored=False, full=False)
944 for abs, st in walkresults.iteritems():
944 for abs, st in walkresults.iteritems():
945 dstate = dirstate[abs]
945 dstate = dirstate[abs]
946 if dstate == '?' and audit_path.check(abs):
946 if dstate == '?' and audit_path.check(abs):
947 unknown.append(abs)
947 unknown.append(abs)
948 elif dstate != 'r' and not st:
948 elif dstate != 'r' and not st:
949 deleted.append(abs)
949 deleted.append(abs)
950 elif dstate == 'r' and st:
950 elif dstate == 'r' and st:
951 forgotten.append(abs)
951 forgotten.append(abs)
952 # for finding renames
952 # for finding renames
953 elif dstate == 'r' and not st:
953 elif dstate == 'r' and not st:
954 removed.append(abs)
954 removed.append(abs)
955 elif dstate == 'a':
955 elif dstate == 'a':
956 added.append(abs)
956 added.append(abs)
957
957
958 return added, unknown, deleted, removed, forgotten
958 return added, unknown, deleted, removed, forgotten
959
959
960 def _findrenames(repo, matcher, added, removed, similarity):
960 def _findrenames(repo, matcher, added, removed, similarity):
961 '''Find renames from removed files to added ones.'''
961 '''Find renames from removed files to added ones.'''
962 renames = {}
962 renames = {}
963 if similarity > 0:
963 if similarity > 0:
964 for old, new, score in similar.findrenames(repo, added, removed,
964 for old, new, score in similar.findrenames(repo, added, removed,
965 similarity):
965 similarity):
966 if (repo.ui.verbose or not matcher.exact(old)
966 if (repo.ui.verbose or not matcher.exact(old)
967 or not matcher.exact(new)):
967 or not matcher.exact(new)):
968 repo.ui.status(_('recording removal of %s as rename to %s '
968 repo.ui.status(_('recording removal of %s as rename to %s '
969 '(%d%% similar)\n') %
969 '(%d%% similar)\n') %
970 (matcher.rel(old), matcher.rel(new),
970 (matcher.rel(old), matcher.rel(new),
971 score * 100))
971 score * 100))
972 renames[new] = old
972 renames[new] = old
973 return renames
973 return renames
974
974
975 def _markchanges(repo, unknown, deleted, renames):
975 def _markchanges(repo, unknown, deleted, renames):
976 '''Marks the files in unknown as added, the files in deleted as removed,
976 '''Marks the files in unknown as added, the files in deleted as removed,
977 and the files in renames as copied.'''
977 and the files in renames as copied.'''
978 wctx = repo[None]
978 wctx = repo[None]
979 with repo.wlock():
979 with repo.wlock():
980 wctx.forget(deleted)
980 wctx.forget(deleted)
981 wctx.add(unknown)
981 wctx.add(unknown)
982 for new, old in renames.iteritems():
982 for new, old in renames.iteritems():
983 wctx.copy(old, new)
983 wctx.copy(old, new)
984
984
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst != origsrc:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only just added; there is no committed copy data
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            # destination is unknown or marked removed: plain add
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
    else:
        # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
1003
1003
def readrequires(opener, supported):
    '''Read and parse .hg/requires, checking that every entry found is in
    the set of supported features.  Returns the set of requirements;
    raises RequirementError on corrupt or unsupported entries.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # entries must be non-empty and start with an alphanumeric
        if not r or not r[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1022
1022
def writerequires(opener, requirements):
    """Write the requirement names, sorted one per line, to .hg/requires."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write("%s\n" % name)
1027
1027
class filecachesubentry(object):
    """Tracks the stat state of a single file for the filecache machinery."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means "cacheability unknown yet"
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if not self.cacheable():
            return
        self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is None:
            # we don't know yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1082
1082
class filecacheentry(object):
    """A group of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1099
1099
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths: file names to watch; resolved to real paths via join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator usage: remember the wrapped function and derive the
        # attribute name from it
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # fast path: a value already stored on the instance is returned
        # without re-stat()ing the tracked files
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # (stat=False: we did not read the value from disk)
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # only the instance-level cached value is dropped; the _filecache
        # entry (stat info) is deliberately kept
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1178
1178
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # value defaults to the empty string when no space separator
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # reap the subprocess (sets proc.returncode) and close the stream
        # even when revision parsing above raised
        if proc:
            proc.communicate()
        if src:
            src.close()
    # NOTE: 'cmd' is only bound on the shell: path, which is the only path
    # where 'proc' is set, so this message cannot hit an unbound name
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1233
1233
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run *cmd* through repo.ui.system while making *lock* inheritable.

    The token obtained from lock.inherit() is stored into *environ* (the
    caller's dict is updated in place) under *envvar* so the child process
    can take over the lock.  Raises LockInheritanceContractViolation when
    no lock is currently held."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as token:
        environ[envvar] = token
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1243
1243
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    Must be called while the wlock is held.  Accepts all the arguments
    ui.system does and returns the subprocess's exit code."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1252
1252
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1259
1259
def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1265
1265
class simplekeyvaluefile(object):
    """A simple file of key=value lines.

    Keys must be alphanumeric and start with a letter; values must not
    contain newline characters."""

    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for backwards compatibility but not used
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Parse the file and return its contents as a dict.

        When 'firstlinenonkeyval' is True the first line is not parsed as
        a key=value pair; it is returned verbatim (minus its trailing
        newline) under the special '__firstline' key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # don't include the trailing '\n' in the __firstline value
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # 'line.strip()' (rather than just 'line') also skips lines
            # that contain only a '\n'
            pairs = (line[:-1].split('=', 1) for line in lines
                     if line.strip())
            updatedict = dict(pairs)
            if self.firstlinekey in updatedict:
                raise error.CorruptedState(
                    _("%r can't be used as a key") % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write a key=>value mapping to the file.

        data is a dict.  Keys must be alphanumeric and start with a letter;
        values must not contain newline characters.  If 'firstline' is not
        None it is written before everything else, verbatim, not in
        key=value form."""
        if firstline is None:
            lines = []
        else:
            lines = ['%s\n' % firstline]

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not k[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1334
1334
# transaction names (matched by prefix) after which obsoleted changesets
# are reported by registersummarycallback()
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction names (matched by prefix) after which incoming changesets
# are reported by registersummarycallback()
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1355
1355
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    'otr' is the transaction to attach post-close callbacks to; 'txnname'
    selects which reports apply (matched by prefix against the source
    lists above).
    """
    def txmatch(sources):
        # true when this transaction's name starts with any of 'sources'
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (display name, revset name) for each instability we report on
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report only the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1439
1439
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line, space-separated summary of *nodes*.

    All nodes are listed in verbose mode or when there are at most
    *maxnumnodes* of them; otherwise only the first *maxnumnodes* appear,
    followed by an "and N others" suffix."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1445
1445
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1460
1460
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally
    loaded.  The default implementation returns the sink unchanged."""
    return sink
1466
1466
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # Direct access to hidden changesets is opt-in via config, and only
    # meaningful on the 'visible' (or already-pinned 'visible-hidden') views.
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # Collect every hash- or revnum-looking symbol across all user specs.
    hashlikesymbols = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlikesymbols.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlikesymbols:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlikesymbols)
    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1509
1509
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for sym in symbols:
        # First interpretation: a plain revision number (only honored when
        # the experimental.directaccess.revnums knob is enabled).
        revnum = None
        try:
            revnum = int(sym)
        except ValueError:
            pass
        if revnum is not None and revnum <= tiprev:
            # NOTE(review): tiprev is len(unficl); valid revnums are strictly
            # below it, so the <= comparison also routes revnum == tiprev
            # through this branch -- confirm that is intended.
            if allowrevnums and revnum not in cl:
                revs.add(revnum)
            continue
        # Otherwise (non-numeric, or a number too large to be a revnum),
        # treat the symbol as a (partial) changeset hash.
        try:
            node = pmatch(sym)
        except (error.LookupError, error.WdirUnsupported):
            continue
        if node is None:
            continue
        rev = unficl.rev(node)
        if rev not in cl:
            revs.add(rev)
    return revs
@@ -1,146 +1,146 b''
1 #require serve
1 #require serve
2
2
3 #testcases sshv1 sshv2
3 #testcases sshv1 sshv2
4
4
5 #if sshv2
5 #if sshv2
6 $ cat >> $HGRCPATH << EOF
6 $ cat >> $HGRCPATH << EOF
7 > [experimental]
7 > [experimental]
8 > sshpeer.advertise-v2 = true
8 > sshpeer.advertise-v2 = true
9 > sshserver.support-v2 = true
9 > sshserver.support-v2 = true
10 > EOF
10 > EOF
11 #endif
11 #endif
12
12
13 $ hg init test
13 $ hg init test
14 $ cd test
14 $ cd test
15
15
16 $ echo foo>foo
16 $ echo foo>foo
17 $ hg addremove
17 $ hg addremove
18 adding foo
18 adding foo
19 $ hg commit -m 1
19 $ hg commit -m 1
20
20
21 $ hg verify
21 $ hg verify
22 checking changesets
22 checking changesets
23 checking manifests
23 checking manifests
24 crosschecking files in changesets and manifests
24 crosschecking files in changesets and manifests
25 checking files
25 checking files
26 1 files, 1 changesets, 1 total revisions
26 1 files, 1 changesets, 1 total revisions
27
27
28 $ hg serve -p $HGPORT -d --pid-file=hg.pid
28 $ hg serve -p $HGPORT -d --pid-file=hg.pid
29 $ cat hg.pid >> $DAEMON_PIDS
29 $ cat hg.pid >> $DAEMON_PIDS
30 $ cd ..
30 $ cd ..
31
31
32 $ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy
32 $ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy
33 requesting all changes
33 requesting all changes
34 adding changesets
34 adding changesets
35 adding manifests
35 adding manifests
36 adding file changes
36 adding file changes
37 added 1 changesets with 1 changes to 1 files
37 added 1 changesets with 1 changes to 1 files
38 new changesets 340e38bdcde4
38 new changesets 340e38bdcde4
39 updating to branch default
39 updating to branch default
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
41
41
42 $ cd copy
42 $ cd copy
43 $ hg verify
43 $ hg verify
44 checking changesets
44 checking changesets
45 checking manifests
45 checking manifests
46 crosschecking files in changesets and manifests
46 crosschecking files in changesets and manifests
47 checking files
47 checking files
48 1 files, 1 changesets, 1 total revisions
48 1 files, 1 changesets, 1 total revisions
49
49
50 $ hg co
50 $ hg co
51 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
51 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
52 $ cat foo
52 $ cat foo
53 foo
53 foo
54
54
55 $ hg manifest --debug
55 $ hg manifest --debug
56 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
56 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
57
57
58 $ hg pull
58 $ hg pull
59 pulling from http://foo@localhost:$HGPORT/
59 pulling from http://foo@localhost:$HGPORT/
60 searching for changes
60 searching for changes
61 no changes found
61 no changes found
62
62
63 $ hg rollback --dry-run --verbose
63 $ hg rollback --dry-run --verbose
64 repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
64 repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
65
65
66 Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers
66 Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers
67 are not encoded like a node:
67 are not encoded like a node:
68
68
69 $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
69 $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
70 pulling from http://foo@localhost:$HGPORT/
70 pulling from http://foo@localhost:$HGPORT/
71 abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
71 abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
72 [255]
72 [255]
73 $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
73 $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
74 pulling from http://foo@localhost:$HGPORT/
74 pulling from http://foo@localhost:$HGPORT/
75 abort: unknown revision '7878787878787878787878787878787878782079'!
75 abort: unknown revision 'xxxxxxxxxxxxxxxxxx y'!
76 [255]
76 [255]
77
77
78 Issue622: hg init && hg pull -u URL doesn't checkout default branch
78 Issue622: hg init && hg pull -u URL doesn't checkout default branch
79
79
80 $ cd ..
80 $ cd ..
81 $ hg init empty
81 $ hg init empty
82 $ cd empty
82 $ cd empty
83 $ hg pull -u ../test
83 $ hg pull -u ../test
84 pulling from ../test
84 pulling from ../test
85 requesting all changes
85 requesting all changes
86 adding changesets
86 adding changesets
87 adding manifests
87 adding manifests
88 adding file changes
88 adding file changes
89 added 1 changesets with 1 changes to 1 files
89 added 1 changesets with 1 changes to 1 files
90 new changesets 340e38bdcde4
90 new changesets 340e38bdcde4
91 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
92
92
93 Test 'file:' uri handling:
93 Test 'file:' uri handling:
94
94
95 $ hg pull -q file://../test-does-not-exist
95 $ hg pull -q file://../test-does-not-exist
96 abort: file:// URLs can only refer to localhost
96 abort: file:// URLs can only refer to localhost
97 [255]
97 [255]
98
98
99 $ hg pull -q file://../test
99 $ hg pull -q file://../test
100 abort: file:// URLs can only refer to localhost
100 abort: file:// URLs can only refer to localhost
101 [255]
101 [255]
102
102
103 MSYS changes 'file:' into 'file;'
103 MSYS changes 'file:' into 'file;'
104
104
105 #if no-msys
105 #if no-msys
106 $ hg pull -q file:../test # no-msys
106 $ hg pull -q file:../test # no-msys
107 #endif
107 #endif
108
108
109 It's tricky to make file:// URLs working on every platform with
109 It's tricky to make file:// URLs working on every platform with
110 regular shell commands.
110 regular shell commands.
111
111
112 $ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
112 $ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
113 $ hg pull -q "$URL"
113 $ hg pull -q "$URL"
114 abort: file:// URLs can only refer to localhost
114 abort: file:// URLs can only refer to localhost
115 [255]
115 [255]
116
116
117 $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
117 $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
118 $ hg pull -q "$URL"
118 $ hg pull -q "$URL"
119
119
120 SEC: check for unsafe ssh url
120 SEC: check for unsafe ssh url
121
121
122 $ cat >> $HGRCPATH << EOF
122 $ cat >> $HGRCPATH << EOF
123 > [ui]
123 > [ui]
124 > ssh = sh -c "read l; read l; read l"
124 > ssh = sh -c "read l; read l; read l"
125 > EOF
125 > EOF
126
126
127 $ hg pull 'ssh://-oProxyCommand=touch${IFS}owned/path'
127 $ hg pull 'ssh://-oProxyCommand=touch${IFS}owned/path'
128 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
128 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
129 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
129 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
130 [255]
130 [255]
131 $ hg pull 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
131 $ hg pull 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
132 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
132 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
133 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
133 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
134 [255]
134 [255]
135 $ hg pull 'ssh://fakehost|touch${IFS}owned/path'
135 $ hg pull 'ssh://fakehost|touch${IFS}owned/path'
136 pulling from ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
136 pulling from ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
137 abort: no suitable response from remote hg!
137 abort: no suitable response from remote hg!
138 [255]
138 [255]
139 $ hg pull 'ssh://fakehost%7Ctouch%20owned/path'
139 $ hg pull 'ssh://fakehost%7Ctouch%20owned/path'
140 pulling from ssh://fakehost%7Ctouch%20owned/path
140 pulling from ssh://fakehost%7Ctouch%20owned/path
141 abort: no suitable response from remote hg!
141 abort: no suitable response from remote hg!
142 [255]
142 [255]
143
143
144 $ [ ! -f owned ] || echo 'you got owned'
144 $ [ ! -f owned ] || echo 'you got owned'
145
145
146 $ cd ..
146 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now