py3: slice on bytes to prevent getting the ascii values...
Pulkit Goyal
r35931:558e01a2 default
@@ -1,1415 +1,1415 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    url,
    util,
    vfs,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)

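# Illustrative sketch (not part of this changeset): how the status named
# tuple is typically consumed. The file names here are made up.
#
#     st = status(['a.txt'], [], [], [], [], [], ['b.txt'])
#     assert st.modified == ['a.txt'] and st.clean == ['b.txt']
#     modified, added, removed = st[:3]  # still unpacks like a plain tuple
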
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case, catch this and pass the exit code to the caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1

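# Illustrative sketch (not part of this changeset): callcatch wraps an
# arbitrary callable so failures turn into exit codes instead of tracebacks.
# The wrapped function below is hypothetical:
#
#     def _run():
#         raise error.Abort('boom')
#     ret = callcatch(ui, _run)  # prints "abort: boom" and returns -1
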
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass

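# Illustrative sketch (not part of this changeset): label validation rejects
# reserved names, control characters, and bare integers. Labels are made up:
#
#     checknewlabel(repo, 'tip', 'bookmark')   # raises error.Abort (reserved)
#     checknewlabel(repo, '123', 'bookmark')   # raises error.Abort (integer)
#     checknewlabel(repo, 'my-feature', 'bookmark')  # passes silently
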
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, util.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

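# Illustrative sketch (not part of this changeset): the same digest can be
# recomputed by hand to show what the cache key covers. The rev numbers are
# made up:
#
#     s = hashlib.sha1()
#     for rev in (2, 5, 9):  # pretend these revs are filtered from the view
#         s.update('%d;' % rev)
#     cachekey = s.digest()  # comparable to filteredhash(repo, maxrev)
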
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

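# Illustrative sketch (not part of this changeset): revsingle resolves a
# user-supplied spec to a single changectx, defaulting to the working
# directory parent. The specs below are made up:
#
#     ctx = revsingle(repo, 'tip')              # -> repo['tip']
#     ctx = revsingle(repo, '')                 # -> repo['.'] via default
#     ctx = revsingle(repo, 'heads(draft())')   # last member of the set
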
def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

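# Illustrative sketch (not part of this changeset): mixing a literal revset
# with a formatted one, per the docstring above. The branch name is made up:
#
#     spec = revsetlang.formatspec('branch(%s)', 'stable')
#     revs = revrange(repo, ['draft()', spec])  # union of both revsets
#     for rev in revs:
#         pass  # revs iterates integer revisions
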
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

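# Illustrative sketch (not part of this changeset): on Windows, where the
# shell does not expand globs, a bare pattern is expanded here, while an
# explicit kind:pattern spec passes through untouched. Filenames are made up:
#
#     expandpats(['*.py'])         # -> ['a.py', 'b.py'] (glob expanded)
#     expandpats(['re:.*\\.py'])   # -> ['re:.*\\.py'] (explicit kind kept)
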
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

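# Illustrative sketch (not part of this changeset): the three matcher
# constructors above differ only in scope. Patterns and paths are made up:
#
#     m = match(repo[None], pats=['glob:*.c'])  # pattern-based matcher
#     m = matchall(repo)                        # matches everything
#     m = matchfiles(repo, ['setup.py'])        # exactly these files
#     if m('setup.py'):
#         pass  # matchers are callable on repo-relative paths
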
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

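# Illustrative sketch (not part of this changeset): with
# "[ui] origbackuppath = .hg/origbackups" configured, a backup lands under
# that directory instead of next to the file. Paths are made up:
#
#     origpath(ui, repo, '/repo/src/a.c')
#     # -> '/repo/.hg/origbackups/src/a.c' (instead of '/repo/src/a.c.orig')
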
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

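# Illustrative sketch (not part of this changeset): the adapter lets
# node-keyed code probe a rev-keyed container. The revset is made up:
#
#     revs = repo.revs('draft()')
#     nodes = _containsnode(repo, revs)
#     repo['tip'].node() in nodes  # True iff tip's rev is in the revset
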
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order; that might be useful
            # for some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)

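# Illustrative sketch (not part of this changeset): a history-editing command
# replacing one node with its rewrite would call (nodes are made up):
#
#     cleanupnodes(repo, {oldnode: [newnode]}, 'amend')
#     cleanupnodes(repo, [n1, n2], 'prune')  # nodes with no successors
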
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

852 def _markchanges(repo, unknown, deleted, renames):
852 def _markchanges(repo, unknown, deleted, renames):
853 '''Marks the files in unknown as added, the files in deleted as removed,
853 '''Marks the files in unknown as added, the files in deleted as removed,
854 and the files in renames as copied.'''
854 and the files in renames as copied.'''
855 wctx = repo[None]
855 wctx = repo[None]
856 with repo.wlock():
856 with repo.wlock():
857 wctx.forget(deleted)
857 wctx.forget(deleted)
858 wctx.add(unknown)
858 wctx.add(unknown)
859 for new, old in renames.iteritems():
859 for new, old in renames.iteritems():
860 wctx.copy(old, new)
860 wctx.copy(old, new)
861
861
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    various reasons, it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

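# A hedged usage sketch (not part of scmutil itself): reading and rewriting
# '.hg/requires' through a vfs opener. The path and the feature set below
# are illustrative assumptions, not values this module defines.
#
#   from mercurial import scmutil, vfs as vfsmod
#   hgvfs = vfsmod.vfs('/path/to/repo/.hg')
#   supported = {'revlogv1', 'store', 'fncache', 'dotencode'}
#   reqs = scmutil.readrequires(hgvfs, supported)  # raises RequirementError
#                                                  # on unknown entries
#   scmutil.writerequires(hgvfs, reqs | {'generaldelta'})
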
class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomically renames or appends to files under .hg, so to
    ensure the cache is reliable we need the filesystem to be able to tell us
    if a file has been replaced. If it can't, we fall back to recreating the
    object on every call (essentially the same behavior as propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie
            # if a writer modifies the file between our stat and our read
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)

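# A hedged sketch of how a consumer wires filecache up, loosely modeled on
# the repofilecache subclass in localrepo.py; the class and property names
# here are illustrative:
#
#   class repofilecache(filecache):
#       def join(self, obj, fname):
#           return obj.vfs.join(fname)   # resolve paths against .hg/
#
#   class localrepository(object):
#       @repofilecache('bookmarks')
#       def _bookmarks(self):
#           # re-executed only when the stat info of .hg/bookmarks changes
#           return bookmarks.bmstore(self)
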
def extdatasource(repo, source):
    """Gather a map of rev -> value from the specified source

    A source spec is treated as a URL, with a special-case 'shell:' type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data

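# A hedged configuration sketch: declaring an extdata source in hgrc and
# querying it. The source name, shell command, and file contents below are
# illustrative assumptions:
#
#   [extdata]
#   bugrefs = shell: cat .hg/bugmap
#
# where '.hg/bugmap' holds newline-separated "<revspec> <value>" records,
# e.g. "3de5eca88c00 bug-1234". extdatasource(repo, 'bugrefs') then returns
# {rev: 'bug-1234', ...} for the revisions known locally.
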
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

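# A hedged usage sketch: spawning a child hg process that can re-enter the
# working-directory lock instead of deadlocking on it. The command shown is
# illustrative:
#
#   with repo.wlock():
#       rc = wlocksub(repo, 'hg -R . update tip')
#
# The child finds HG_WLOCK_LOCKER in its environment and inherits the lock
# for the duration of the call.
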
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))

def gddeltaconfig(ui):
    """helper function to know if incoming deltas should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumeric and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' and therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumeric and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, verbatim, not in key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

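# A hedged usage sketch; the file name, keys, and values are illustrative,
# not a format any extension actually writes:
#
#   skv = simplekeyvaluefile(repo.vfs, 'mystate')
#   skv.write({'originalwd': 'abc123', 'activebookmark': 'feature'},
#             firstline='1')          # e.g. a version banner
#   d = skv.read(firstlinenonkeyval=True)
#   version = d['__firstline']        # -> '1'
#   wd = d['originalwd']              # -> 'abc123'
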
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)

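# A hedged sketch of the call site, simplified from how localrepo opens a
# transaction; the transaction name is illustrative:
#
#   tr = repo.transaction('pull')
#   registersummarycallback(repo, tr, txnname='pull')
#   ...                     # apply the incoming changes
#   tr.close()              # post-close callbacks fire and print e.g.
#                           # "new changesets <minhash>:<maxhash>"
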
def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

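# A hedged note: this check is typically attached to write transactions
# when the experimental config below is enabled; the exact plumbing lives
# outside this module, and the option name is stated here as an assumption:
#
#   [experimental]
#   single-head-per-branch = True
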
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally
    loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate the branch/tags caches
    # until we can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

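# A hedged sketch of the configuration that activates this path, based on
# the configbool checks above:
#
#   [experimental]
#   directaccess = True
#   directaccess.revnums = True   # also allow bare revision numbers
#
# With this set, e.g. 'hg log -r <hash-of-hidden-changeset>' reaches this
# function and the changeset is pinned via repo.filtered('visible-hidden').
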
def _getrevsfromsymbols(repo, symbols):
    """parses the list of symbols and returns a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs