##// END OF EJS Templates
extdata: ignore ambiguous identifier as well
Yuya Nishihara -
r34460:d5c5cc76 default
parent child Browse files
Show More
@@ -1,1193 +1,1194 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 short,
22 short,
23 wdirid,
23 wdirid,
24 wdirrev,
24 wdirrev,
25 )
25 )
26
26
27 from . import (
27 from . import (
28 encoding,
28 encoding,
29 error,
29 error,
30 match as matchmod,
30 match as matchmod,
31 obsolete,
31 obsolete,
32 obsutil,
32 obsutil,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 revsetlang,
36 revsetlang,
37 similar,
37 similar,
38 url,
38 url,
39 util,
39 util,
40 )
40 )
41
41
# Select the platform-specific implementation module; both modules are
# expected to expose the same interface (at least termsize).
if pycompat.osname == 'nt':
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

# Re-export so callers can use scmutil.termsize directly.
termsize = scmplatform.termsize
48
48
class status(tuple):
    '''Immutable 7-tuple holding one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
101
101
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context it should be read from, preferring
    # ctx1.  Subpaths only present in ctx2 matter when the .hgsub file was
    # modified (in ctx2) but that change is not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For anything only in ctx2, yield an empty subrepo based on ctx1 so
    # that 'sub.{status|diff}(rev2)' has something sensible to compare
    # against instead of the ctx2 subrepo being compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
126
126
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # Count only live secret changesets; extinct ones are irrelevant.
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
143
143
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Record the traceback (shown when ui.traceback is enabled)
            # before re-raising into the handlers below.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    # NOTE(review): handler ORDER matters — more specific exception types
    # must appear before their bases; do not reorder casually.
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            # No locker information available on the lock.
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # 'basestring' is the Python 2 str/unicode base class; this module
        # predates the py3 port.
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired signals "user action needed", not failure:
        # exit status 1, unlike the generic -1 below.
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # Last word of the ImportError message is the missing module name.
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # presumably an HTTPError-like object (has .code) — verify
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # presumably a URLError-like object (has .reason) — verify
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. pager closed early) is not worth reporting.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
254
254
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not acceptable as a new bookmark/branch/tag name.'''
    # The "kind" parameter is deliberately never interpolated into ui
    # output; doing so would make the strings hard to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        # Not an integer: fine.
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
268
268
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
273
273
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        # Config says "ignore": skip the (relatively costly) check.
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
285
285
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows cannot represent non-portable names at all, so always abort
    # there regardless of configuration.
    abort = lval == 'abort' or pycompat.osname == 'nt'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
298
298
class casecollisionauditor(object):
    '''Warn or abort when a newly audited file would case-fold-collide
    with a file already tracked in the dirstate.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked filename in one shot: joining on NUL
        # and splitting again lets encoding.lower() run over a single big
        # string instead of once per file.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # Track filenames already audited so calling this object twice
        # with the same name does not raise a spurious collision.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
322
322
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Caches commonly validate themselves by checking that the cached
    tiprev/tipnode match the repository, but that is not enough for a
    repoview: its set of visible revisions can change while tiprev and
    tipnode stay the same.

    Returns the SHA-1 digest over all revs (<= maxrev) filtered from the
    view, or None when nothing is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
346
346
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only propagate walk errors for the root path itself; failures
        # deeper in the tree are silently skipped.
        if err.filename == path:
            raise err
    # os.path.samestat is not available everywhere; without it we cannot
    # safely dedupe symlinked directories, so symlink following is off.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Return True (and record dirname's stat) when dirname has not
            # been visited yet; samestat comparison breaks symlink cycles.
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # Sort for deterministic yield order.
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the symlink target as its own subtree,
                        # sharing seen_dirs so cycles stay broken.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Mutate dirs in place so os.walk (topdown=True) only descends
            # into the kept (non-symlink, unseen) directories.
            dirs[:] = newdirs
394
394
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is not None:
        return node
    # The working directory has no real node; stand in the magic wdir id.
    return wdirid
401
401
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # The working directory has no revision number; map it to wdirrev so
    # callers can still compare and do arithmetic.
    return wdirrev if rev is None else rev
409
409
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
415
415
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Full hash in debug mode, abbreviated hash otherwise.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
423
423
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single user-supplied revision spec to a change context.'''
    # An empty spec falls back to the default revision, but a literal 0
    # is a valid revision number and must be kept.
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
432
432
def _pairspec(revspec):
    # A top-level range operator means the spec itself denotes a pair.
    tree = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return tree and tree[0] in rangeops
436
436
def revpair(repo, revs):
    '''Resolve user-supplied revision specs to a (first, second) node pair.'''
    if not revs:
        # Nothing given: compare against the working copy's first parent.
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    first = second = None
    if resolved:
        if resolved.isascending():
            first, second = resolved.min(), resolved.max()
        elif resolved.isdescending():
            first, second = resolved.max(), resolved.min()
        else:
            first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
466
466
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred way to run user-supplied revsets: config options
    such as revset aliases are honoured.

    The entries of ``specs`` are combined with a chained ``OR``; an empty
    ``specs`` produces an empty result.  Integer entries are treated as
    revision numbers.

    The specs are assumed to be formatted already -- expand arguments with
    ``revsetlang.formatspec()`` before passing them in.  A single spec is
    fine too.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
494
494
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # Merge: both parents always matter.
        return parents
    if repo.ui.debugflag:
        # Debug output always shows both parent slots, padded with null.
        return [parents[0], repo['null']]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # Parent is just the preceding revision: not meaningful.
        return []
    return parents
510
510
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicit kind prefix ('glob:', 're:', ...): leave untouched.
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # Nothing matched: keep the original pattern verbatim.
            expanded.append(kindpat)
    return expanded
529
529
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    # default bad-file callback; 'm' is resolved lazily when the matcher
    # actually reports a bad file, after it has been bound below
    def _warnbad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'),
                  badfn=badfn if badfn is not None else _warnbad)

    if m.always():
        # matcher matches everything: report that no patterns were needed
        pats = []
    return m, pats
554
554
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _unusedpats = matchandpats(ctx, pats, opts, globbed, default,
                                        badfn=badfn)
    return matcher
559
559
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
563
563
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
567
567
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    backuppath = ui.config('ui', 'origbackuppath')
    if backuppath is None:
        # no custom location configured: classic sibling ".orig" file
        return filepath + ".orig"

    relpath = os.path.relpath(filepath, start=repo.root)
    fullpath = repo.wjoin(backuppath, relpath)

    # make sure the destination directory exists before the caller writes
    backupdir = repo.vfs.dirname(fullpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullpath
587
587
588 class _containsnode(object):
588 class _containsnode(object):
589 """proxy __contains__(node) to container.__contains__ which accepts revs"""
589 """proxy __contains__(node) to container.__contains__ which accepts revs"""
590
590
591 def __init__(self, repo, revcontainer):
591 def __init__(self, repo, revcontainer):
592 self._torev = repo.changelog.rev
592 self._torev = repo.changelog.rev
593 self._revcontains = revcontainer.__contains__
593 self._revcontains = revcontainer.__contains__
594
594
595 def __contains__(self, node):
595 def __contains__(self, node):
596 return self._revcontains(self._torev(node))
596 return self._revcontains(self._torev(node))
597
597
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms: a bare iterable of nodes becomes a
    # mapping with empty successor tuples
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # caller-supplied move takes precedence
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards: pick the closest ancestor that is not
            # itself being replaced
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                # a change entry of (bookmark, None) deletes that bookmark
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            # obsmarkers disabled: fall back to physically stripping the
            # replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
686
686
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal,
    recursing into subrepos, and record likely renames.

    'prefix' is prepended to paths in user-facing messages about subrepos.
    'dry_run' and 'similarity' default to the corresponding values in 'opts'.
    Returns 1 if any subrepo reported a change or any file explicitly listed
    in the match patterns was rejected, otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into subrepos that are matched (explicitly, by --subrepos, or
    # because the matcher has files under them)
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher reports as bad; only explicitly-listed ones
    # cause a non-zero return value (see loop at the bottom)
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added/removed (always for pattern matches, only
    # with --verbose for exact file arguments)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
742
742
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # the badfn closure appends to this list as the matcher walks
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        for abs in sorted(unknownset | set(deleted)):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # any file the matcher rejected means failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
771
771
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # NOTE(review): pathauditor presumably rejects paths escaping the repo
    # (e.g. through symlinks) so they are not reported as unknown — confirm
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # classify each walked path by its dirstate entry ('?' untracked,
    # 'r' marked removed, 'a' marked added) combined with whether a stat
    # result 'st' exists, i.e. whether the file is present on disk
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and valid: candidate for 'add'
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk: candidate for 'remove'
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
800
800
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    if similarity <= 0:
        # rename detection disabled
        return {}
    renames = {}
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # report unless both endpoints were exact (non-verbose) arguments
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
815
815
816 def _markchanges(repo, unknown, deleted, renames):
816 def _markchanges(repo, unknown, deleted, renames):
817 '''Marks the files in unknown as added, the files in deleted as removed,
817 '''Marks the files in unknown as added, the files in deleted as removed,
818 and the files in renames as copied.'''
818 and the files in renames as copied.'''
819 wctx = repo[None]
819 wctx = repo[None]
820 with repo.wlock():
820 with repo.wlock():
821 wctx.forget(deleted)
821 wctx.forget(deleted)
822 wctx.add(unknown)
822 wctx.add(unknown)
823 for new, old in renames.iteritems():
823 for new, old in renames.iteritems():
824 wctx.copy(old, new)
824 wctx.copy(old, new)
825
825
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.

    'dryrun' suppresses all dirstate mutations; 'cwd' is only used to format
    paths in the warning message.
    """
    # if src is itself a copy, chase it back to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dirstate letters: 'm' merged, 'n' normal — anything else needs a
        # fresh lookup so dst ends up in a clean tracked state
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only ever added, never committed: there is no
            # revision to record copy data against, so just add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # '?' unknown or 'r' removed: (re-)add the destination
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
844
844
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file itself
        # is damaged, not merely unsupported
        if not requirement or not requirement[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
863
863
def writerequires(opener, requirements):
    """Write the requirement set to .hg/requires, one entry per line."""
    with opener('requires', 'w') as fp:
        fp.write(''.join("%s\n" % r for r in sorted(requirements)))
868
868
class filecachesubentry(object):
    """Stat-based change detector for a single file path.

    Records the file's stat info (via util.cachestat) and answers whether
    the file has changed since. If the stat info says the file cannot be
    cached reliably, changed() always reports True.
    """
    def __init__(self, path, stat):
        # path: file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info; pointless if the file isn't cacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file changed (or can't be cached reliably),
        updating the recorded stat info when a change is seen."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat for path, or None if the file is missing;
        # any other OS error propagates
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
923
923
class filecacheentry(object):
    """Group of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # short-circuits at the first changed entry, like the obvious loop
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
940
940
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths are joined against the decorated object via self.join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and cache key
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # stale: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # mirror into __dict__ so the fast path above short-circuits
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # only clears the __dict__ mirror; the _filecache entry survives so
        # the next __get__ re-checks the files
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1019
1019
1020 def extdatasource(repo, source):
1020 def extdatasource(repo, source):
1021 """Gather a map of rev -> value dict from the specified source
1021 """Gather a map of rev -> value dict from the specified source
1022
1022
1023 A source spec is treated as a URL, with a special case shell: type
1023 A source spec is treated as a URL, with a special case shell: type
1024 for parsing the output from a shell command.
1024 for parsing the output from a shell command.
1025
1025
1026 The data is parsed as a series of newline-separated records where
1026 The data is parsed as a series of newline-separated records where
1027 each record is a revision specifier optionally followed by a space
1027 each record is a revision specifier optionally followed by a space
1028 and a freeform string value. If the revision is known locally, it
1028 and a freeform string value. If the revision is known locally, it
1029 is converted to a rev, otherwise the record is skipped.
1029 is converted to a rev, otherwise the record is skipped.
1030
1030
1031 Note that both key and value are treated as UTF-8 and converted to
1031 Note that both key and value are treated as UTF-8 and converted to
1032 the local encoding. This allows uniformity between local and
1032 the local encoding. This allows uniformity between local and
1033 remote data sources.
1033 remote data sources.
1034 """
1034 """
1035
1035
1036 spec = repo.ui.config("extdata", source)
1036 spec = repo.ui.config("extdata", source)
1037 if not spec:
1037 if not spec:
1038 raise error.Abort(_("unknown extdata source '%s'") % source)
1038 raise error.Abort(_("unknown extdata source '%s'") % source)
1039
1039
1040 data = {}
1040 data = {}
1041 if spec.startswith("shell:"):
1041 if spec.startswith("shell:"):
1042 # external commands should be run relative to the repo root
1042 # external commands should be run relative to the repo root
1043 cmd = spec[6:]
1043 cmd = spec[6:]
1044 cwd = os.getcwd()
1044 cwd = os.getcwd()
1045 os.chdir(repo.root)
1045 os.chdir(repo.root)
1046 try:
1046 try:
1047 src = util.popen(cmd)
1047 src = util.popen(cmd)
1048 finally:
1048 finally:
1049 os.chdir(cwd)
1049 os.chdir(cwd)
1050 else:
1050 else:
1051 # treat as a URL or file
1051 # treat as a URL or file
1052 src = url.open(repo.ui, spec)
1052 src = url.open(repo.ui, spec)
1053
1053
1054 try:
1054 try:
1055 for l in src.readlines():
1055 for l in src.readlines():
1056 if " " in l:
1056 if " " in l:
1057 k, v = l.strip().split(" ", 1)
1057 k, v = l.strip().split(" ", 1)
1058 else:
1058 else:
1059 k, v = l.strip(), ""
1059 k, v = l.strip(), ""
1060
1060
1061 k = encoding.tolocal(k)
1061 k = encoding.tolocal(k)
1062 if k in repo:
1062 try:
1063 # we ignore data for nodes that don't exist locally
1064 data[repo[k].rev()] = encoding.tolocal(v)
1063 data[repo[k].rev()] = encoding.tolocal(v)
1064 except (error.LookupError, error.RepoLookupError):
1065 pass # we ignore data for nodes that don't exist locally
1065 finally:
1066 finally:
1066 src.close()
1067 src.close()
1067
1068
1068 return data
1069 return data
1069
1070
1070 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1071 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1071 if lock is None:
1072 if lock is None:
1072 raise error.LockInheritanceContractViolation(
1073 raise error.LockInheritanceContractViolation(
1073 'lock can only be inherited while held')
1074 'lock can only be inherited while held')
1074 if environ is None:
1075 if environ is None:
1075 environ = {}
1076 environ = {}
1076 with lock.inherit() as locker:
1077 with lock.inherit() as locker:
1077 environ[envvar] = locker
1078 environ[envvar] = locker
1078 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1079 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1079
1080
1080 def wlocksub(repo, cmd, *args, **kwargs):
1081 def wlocksub(repo, cmd, *args, **kwargs):
1081 """run cmd as a subprocess that allows inheriting repo's wlock
1082 """run cmd as a subprocess that allows inheriting repo's wlock
1082
1083
1083 This can only be called while the wlock is held. This takes all the
1084 This can only be called while the wlock is held. This takes all the
1084 arguments that ui.system does, and returns the exit code of the
1085 arguments that ui.system does, and returns the exit code of the
1085 subprocess."""
1086 subprocess."""
1086 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1087 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1087 **kwargs)
1088 **kwargs)
1088
1089
1089 def gdinitconfig(ui):
1090 def gdinitconfig(ui):
1090 """helper function to know if a repo should be created as general delta
1091 """helper function to know if a repo should be created as general delta
1091 """
1092 """
1092 # experimental config: format.generaldelta
1093 # experimental config: format.generaldelta
1093 return (ui.configbool('format', 'generaldelta')
1094 return (ui.configbool('format', 'generaldelta')
1094 or ui.configbool('format', 'usegeneraldelta'))
1095 or ui.configbool('format', 'usegeneraldelta'))
1095
1096
1096 def gddeltaconfig(ui):
1097 def gddeltaconfig(ui):
1097 """helper function to know if incoming delta should be optimised
1098 """helper function to know if incoming delta should be optimised
1098 """
1099 """
1099 # experimental config: format.generaldelta
1100 # experimental config: format.generaldelta
1100 return ui.configbool('format', 'generaldelta')
1101 return ui.configbool('format', 'generaldelta')
1101
1102
1102 class simplekeyvaluefile(object):
1103 class simplekeyvaluefile(object):
1103 """A simple file with key=value lines
1104 """A simple file with key=value lines
1104
1105
1105 Keys must be alphanumerics and start with a letter, values must not
1106 Keys must be alphanumerics and start with a letter, values must not
1106 contain '\n' characters"""
1107 contain '\n' characters"""
1107 firstlinekey = '__firstline'
1108 firstlinekey = '__firstline'
1108
1109
1109 def __init__(self, vfs, path, keys=None):
1110 def __init__(self, vfs, path, keys=None):
1110 self.vfs = vfs
1111 self.vfs = vfs
1111 self.path = path
1112 self.path = path
1112
1113
1113 def read(self, firstlinenonkeyval=False):
1114 def read(self, firstlinenonkeyval=False):
1114 """Read the contents of a simple key-value file
1115 """Read the contents of a simple key-value file
1115
1116
1116 'firstlinenonkeyval' indicates whether the first line of file should
1117 'firstlinenonkeyval' indicates whether the first line of file should
1117 be treated as a key-value pair or returned fully under the
1118 be treated as a key-value pair or returned fully under the
1118 __firstline key."""
1119 __firstline key."""
1119 lines = self.vfs.readlines(self.path)
1120 lines = self.vfs.readlines(self.path)
1120 d = {}
1121 d = {}
1121 if firstlinenonkeyval:
1122 if firstlinenonkeyval:
1122 if not lines:
1123 if not lines:
1123 e = _("empty simplekeyvalue file")
1124 e = _("empty simplekeyvalue file")
1124 raise error.CorruptedState(e)
1125 raise error.CorruptedState(e)
1125 # we don't want to include '\n' in the __firstline
1126 # we don't want to include '\n' in the __firstline
1126 d[self.firstlinekey] = lines[0][:-1]
1127 d[self.firstlinekey] = lines[0][:-1]
1127 del lines[0]
1128 del lines[0]
1128
1129
1129 try:
1130 try:
1130 # the 'if line.strip()' part prevents us from failing on empty
1131 # the 'if line.strip()' part prevents us from failing on empty
1131 # lines which only contain '\n' therefore are not skipped
1132 # lines which only contain '\n' therefore are not skipped
1132 # by 'if line'
1133 # by 'if line'
1133 updatedict = dict(line[:-1].split('=', 1) for line in lines
1134 updatedict = dict(line[:-1].split('=', 1) for line in lines
1134 if line.strip())
1135 if line.strip())
1135 if self.firstlinekey in updatedict:
1136 if self.firstlinekey in updatedict:
1136 e = _("%r can't be used as a key")
1137 e = _("%r can't be used as a key")
1137 raise error.CorruptedState(e % self.firstlinekey)
1138 raise error.CorruptedState(e % self.firstlinekey)
1138 d.update(updatedict)
1139 d.update(updatedict)
1139 except ValueError as e:
1140 except ValueError as e:
1140 raise error.CorruptedState(str(e))
1141 raise error.CorruptedState(str(e))
1141 return d
1142 return d
1142
1143
1143 def write(self, data, firstline=None):
1144 def write(self, data, firstline=None):
1144 """Write key=>value mapping to a file
1145 """Write key=>value mapping to a file
1145 data is a dict. Keys must be alphanumerical and start with a letter.
1146 data is a dict. Keys must be alphanumerical and start with a letter.
1146 Values must not contain newline characters.
1147 Values must not contain newline characters.
1147
1148
1148 If 'firstline' is not None, it is written to file before
1149 If 'firstline' is not None, it is written to file before
1149 everything else, as it is, not in a key=value form"""
1150 everything else, as it is, not in a key=value form"""
1150 lines = []
1151 lines = []
1151 if firstline is not None:
1152 if firstline is not None:
1152 lines.append('%s\n' % firstline)
1153 lines.append('%s\n' % firstline)
1153
1154
1154 for k, v in data.items():
1155 for k, v in data.items():
1155 if k == self.firstlinekey:
1156 if k == self.firstlinekey:
1156 e = "key name '%s' is reserved" % self.firstlinekey
1157 e = "key name '%s' is reserved" % self.firstlinekey
1157 raise error.ProgrammingError(e)
1158 raise error.ProgrammingError(e)
1158 if not k[0].isalpha():
1159 if not k[0].isalpha():
1159 e = "keys must start with a letter in a key-value file"
1160 e = "keys must start with a letter in a key-value file"
1160 raise error.ProgrammingError(e)
1161 raise error.ProgrammingError(e)
1161 if not k.isalnum():
1162 if not k.isalnum():
1162 e = "invalid key name in a simple key-value file"
1163 e = "invalid key name in a simple key-value file"
1163 raise error.ProgrammingError(e)
1164 raise error.ProgrammingError(e)
1164 if '\n' in v:
1165 if '\n' in v:
1165 e = "invalid value in a simple key-value file"
1166 e = "invalid value in a simple key-value file"
1166 raise error.ProgrammingError(e)
1167 raise error.ProgrammingError(e)
1167 lines.append("%s=%s\n" % (k, v))
1168 lines.append("%s=%s\n" % (k, v))
1168 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1169 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1169 fp.write(''.join(lines))
1170 fp.write(''.join(lines))
1170
1171
1171 _reportobsoletedsource = [
1172 _reportobsoletedsource = [
1172 'debugobsolete',
1173 'debugobsolete',
1173 'pull',
1174 'pull',
1174 'push',
1175 'push',
1175 'serve',
1176 'serve',
1176 'unbundle',
1177 'unbundle',
1177 ]
1178 ]
1178
1179
1179 def registersummarycallback(repo, otr, txnname=''):
1180 def registersummarycallback(repo, otr, txnname=''):
1180 """register a callback to issue a summary after the transaction is closed
1181 """register a callback to issue a summary after the transaction is closed
1181 """
1182 """
1182 for source in _reportobsoletedsource:
1183 for source in _reportobsoletedsource:
1183 if txnname.startswith(source):
1184 if txnname.startswith(source):
1184 reporef = weakref.ref(repo)
1185 reporef = weakref.ref(repo)
1185 def reportsummary(tr):
1186 def reportsummary(tr):
1186 """the actual callback reporting the summary"""
1187 """the actual callback reporting the summary"""
1187 repo = reporef()
1188 repo = reporef()
1188 obsoleted = obsutil.getobsoleted(repo, tr)
1189 obsoleted = obsutil.getobsoleted(repo, tr)
1189 if obsoleted:
1190 if obsoleted:
1190 repo.ui.status(_('obsoleted %i changesets\n')
1191 repo.ui.status(_('obsoleted %i changesets\n')
1191 % len(obsoleted))
1192 % len(obsoleted))
1192 otr.addpostclose('00-txnreport', reportsummary)
1193 otr.addpostclose('00-txnreport', reportsummary)
1193 break
1194 break
@@ -1,88 +1,89 b''
1 $ hg init repo
1 $ hg init repo
2 $ cd repo
2 $ cd repo
3 $ for n in 0 1 2 3; do
3 $ for n in 0 1 2 3 4 5 6 7 8 9 10 11; do
4 > echo $n > $n
4 > echo $n > $n
5 > hg ci -qAm $n
5 > hg ci -qAm $n
6 > done
6 > done
7
7
8 test revset support
8 test revset support
9
9
10 $ cat <<'EOF' >> .hg/hgrc
10 $ cat <<'EOF' >> .hg/hgrc
11 > [extdata]
11 > [extdata]
12 > filedata = file:extdata.txt
12 > filedata = file:extdata.txt
13 > notes = notes.txt
13 > notes = notes.txt
14 > shelldata = shell:cat extdata.txt | grep 2
14 > shelldata = shell:cat extdata.txt | grep 2
15 > EOF
15 > EOF
16 $ cat <<'EOF' > extdata.txt
16 $ cat <<'EOF' > extdata.txt
17 > 2 another comment on 2
17 > 2 another comment on 2
18 > 3
18 > 3
19 > EOF
19 > EOF
20 $ cat <<'EOF' > notes.txt
20 $ cat <<'EOF' > notes.txt
21 > f6ed this change is great!
21 > f6ed this change is great!
22 > e834 this is buggy :(
22 > e834 this is buggy :(
23 > 0625 first post
23 > 0625 first post
24 > bogusnode gives no error
24 > bogusnode gives no error
25 > a ambiguous node gives no error
25 > EOF
26 > EOF
26
27
27 $ hg log -qr "extdata(filedata)"
28 $ hg log -qr "extdata(filedata)"
28 2:f6ed99a58333
29 2:f6ed99a58333
29 3:9de260b1e88e
30 3:9de260b1e88e
30 $ hg log -qr "extdata(shelldata)"
31 $ hg log -qr "extdata(shelldata)"
31 2:f6ed99a58333
32 2:f6ed99a58333
32
33
33 test weight of extdata() revset
34 test weight of extdata() revset
34
35
35 $ hg debugrevspec -p optimized "extdata(filedata) & 3"
36 $ hg debugrevspec -p optimized "extdata(filedata) & 3"
36 * optimized:
37 * optimized:
37 (andsmally
38 (andsmally
38 (func
39 (func
39 (symbol 'extdata')
40 (symbol 'extdata')
40 (symbol 'filedata'))
41 (symbol 'filedata'))
41 (symbol '3'))
42 (symbol '3'))
42 3
43 3
43
44
44 test bad extdata() revset source
45 test bad extdata() revset source
45
46
46 $ hg log -qr "extdata()"
47 $ hg log -qr "extdata()"
47 hg: parse error: extdata takes at least 1 string argument
48 hg: parse error: extdata takes at least 1 string argument
48 [255]
49 [255]
49 $ hg log -qr "extdata(unknown)"
50 $ hg log -qr "extdata(unknown)"
50 abort: unknown extdata source 'unknown'
51 abort: unknown extdata source 'unknown'
51 [255]
52 [255]
52
53
53 test template support:
54 test template support:
54
55
55 $ hg log -r:3 -T "{node|short}{if(extdata('notes'), ' # {extdata('notes')}')}\n"
56 $ hg log -r:3 -T "{node|short}{if(extdata('notes'), ' # {extdata('notes')}')}\n"
56 06254b906311 # first post
57 06254b906311 # first post
57 e8342c9a2ed1 # this is buggy :(
58 e8342c9a2ed1 # this is buggy :(
58 f6ed99a58333 # this change is great!
59 f6ed99a58333 # this change is great!
59 9de260b1e88e
60 9de260b1e88e
60
61
61 test template cache:
62 test template cache:
62
63
63 $ hg log -r:3 -T '{rev} "{extdata("notes")}" "{extdata("shelldata")}"\n'
64 $ hg log -r:3 -T '{rev} "{extdata("notes")}" "{extdata("shelldata")}"\n'
64 0 "first post" ""
65 0 "first post" ""
65 1 "this is buggy :(" ""
66 1 "this is buggy :(" ""
66 2 "this change is great!" "another comment on 2"
67 2 "this change is great!" "another comment on 2"
67 3 "" ""
68 3 "" ""
68
69
69 test bad extdata() template source
70 test bad extdata() template source
70
71
71 $ hg log -T "{extdata()}\n"
72 $ hg log -T "{extdata()}\n"
72 hg: parse error: extdata expects one argument
73 hg: parse error: extdata expects one argument
73 [255]
74 [255]
74 $ hg log -T "{extdata('unknown')}\n"
75 $ hg log -T "{extdata('unknown')}\n"
75 abort: unknown extdata source 'unknown'
76 abort: unknown extdata source 'unknown'
76 [255]
77 [255]
77
78
78 we don't fix up relative file URLs, but we do run shell commands in repo root
79 we don't fix up relative file URLs, but we do run shell commands in repo root
79
80
80 $ mkdir sub
81 $ mkdir sub
81 $ cd sub
82 $ cd sub
82 $ hg log -qr "extdata(filedata)"
83 $ hg log -qr "extdata(filedata)"
83 abort: error: No such file or directory
84 abort: error: No such file or directory
84 [255]
85 [255]
85 $ hg log -qr "extdata(shelldata)"
86 $ hg log -qr "extdata(shelldata)"
86 2:f6ed99a58333
87 2:f6ed99a58333
87
88
88 $ cd ..
89 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now