##// END OF EJS Templates
extdata: use subprocess so we don't have to chdir() manually
Yuya Nishihara -
r34462:c67db5dc default
parent child Browse files
Show More
@@ -1,1194 +1,1196 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import weakref
17 import weakref
17
18
18 from .i18n import _
19 from .i18n import _
19 from .node import (
20 from .node import (
20 hex,
21 hex,
21 nullid,
22 nullid,
22 short,
23 short,
23 wdirid,
24 wdirid,
24 wdirrev,
25 wdirrev,
25 )
26 )
26
27
27 from . import (
28 from . import (
28 encoding,
29 encoding,
29 error,
30 error,
30 match as matchmod,
31 match as matchmod,
31 obsolete,
32 obsolete,
32 obsutil,
33 obsutil,
33 pathutil,
34 pathutil,
34 phases,
35 phases,
35 pycompat,
36 pycompat,
36 revsetlang,
37 revsetlang,
37 similar,
38 similar,
38 url,
39 url,
39 util,
40 util,
40 )
41 )
41
42
42 if pycompat.osname == 'nt':
43 if pycompat.osname == 'nt':
43 from . import scmwindows as scmplatform
44 from . import scmwindows as scmplatform
44 else:
45 else:
45 from . import scmposix as scmplatform
46 from . import scmposix as scmplatform
46
47
47 termsize = scmplatform.termsize
48 termsize = scmplatform.termsize
48
49
class status(tuple):
    """Tuple subclass holding one list of files per status category.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    """

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        """files that have been modified"""
        return self[0]

    @property
    def added(self):
        """files that have been added"""
        return self[1]

    @property
    def removed(self):
        """files that have been removed"""
        return self[2]

    @property
    def deleted(self):
        """files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        """
        return self[3]

    @property
    def unknown(self):
        """files not in the dirstate that are not ignored"""
        return self[4]

    @property
    def ignored(self):
        """files not in the dirstate that are ignored (by _dirignore())"""
        return self[5]

    @property
    def clean(self):
        """files that have not been modified"""
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
101
102
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath -> ctx) mapping, preferring ctx1.  The ctx2 paths
    # matter when .hgsub was modified (in ctx2) but not yet committed
    # (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # paths present only in ctx2 get special treatment below
    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            missing.add(subpath)
            del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For anything that exists only in ctx2, yield an empty subrepo based
    # on ctx1 so 'sub.{status|diff}(rev2)' gives an accurate result;
    # otherwise the ctx2 subrepo would be compared against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
126
127
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # count excluded nodes that are hidden only because they are secret
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
143
144
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # record the traceback (shown with --traceback) before the
            # handlers below turn the exception into a user message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            # no locker recorded in the lock file
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # args[1] may be any object a broken server sent; only treat real
        # strings as printable text
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # exit code 1 signals "user action required" rather than an error
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is the module that failed to import
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # presumably an HTTP error object (has .code) — confirm
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # presumably a URL/SSL error object (has .reason) — confirm
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output consumer went away): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    # generic failure exit code for all handled-but-not-returning cases
    return -1
254
255
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label name.

    Note: the "kind" parameter is deliberately not used in ui output,
    because that would make the strings difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
268
269
def checkfilename(f):
    """Check that the filename f is an acceptable filename for a tracked
    file (i.e. contains neither newline nor carriage return)."""
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
273
274
def checkportable(ui, f):
    """Check if filename f is portable; warn or abort depending on config."""
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
285
286
def checkportabilityalert(ui):
    """Return (abort, warn) describing how the user's configuration wants
    non-portable filenames handled: nothing, a warning, or an abort."""
    value = ui.config('ui', 'portablefilenames')
    lowered = value.lower()
    boolean = util.parsebool(value)
    # Windows always aborts; elsewhere only an explicit 'abort' does
    abort = pycompat.osname == 'nt' or lowered == 'abort'
    warn = boolean or lowered == 'warn'
    if boolean is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % value)
    return abort, warn
298
299
class casecollisionauditor(object):
    """Warn or abort when a newly added file collides case-insensitively
    with a file already known to the dirstate."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case the entire dirstate with a single encoding.lower()
        # call by joining the filenames on NUL and splitting again.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # Remember names already audited so calling this object twice with
        # the same filename does not report a bogus self-collision.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
322
323
def filteredhash(repo, maxrev):
    """Return a SHA-1 digest of the revisions filtered from the current
    repoview (up to and including maxrev), or None when nothing applies.

    Caches usually validate themselves against the cached tiprev/tipnode,
    but that is not enough for repoviews: the set of visible revisions can
    change while the repository tip stays the same.  Hashing every
    filtered rev <= maxrev gives callers a key that does change then.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
346
347
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the top-level path itself;
        # unreadable subdirectories are silently skipped by os.walk
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and record dirname's stat) iff dirname was not
            # seen before; this is what breaks symlink cycles
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without os.path.samestat we cannot detect symlink cycles, so
        # refuse to follow symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target via a nested generator so
                        # seen_dirs keeps tracking cycles across the jump
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
394
395
def binnode(ctx):
    """Return the binary node id of the given basectx.

    The working directory (whose node is None) maps to the fixed
    pseudo-id wdirid.
    """
    node = ctx.node()
    return wdirid if node is None else node
401
402
def intrev(ctx):
    """Return an integer for the given basectx usable in comparison or
    arithmetic; the working directory (rev None) maps to wdirrev."""
    rev = ctx.rev()
    return wdirrev if rev is None else rev
409
410
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
415
416
def formatrevnode(ui, rev, node):
    """Return 'rev:node' with the node rendered long (debug) or short."""
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
423
424
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single changectx, falling back to
    repo[default] when revspec is empty (but not the integer 0)."""
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
432
433
def _pairspec(revspec):
    """Report whether revspec parses to a top-level range expression."""
    parsed = revsetlang.parse(revspec)
    if not parsed:
        return parsed
    return parsed[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
436
437
def revpair(repo, revs):
    """Resolve a list of revset specs to a (node, node-or-None) pair.

    With no specs, the pair is (first working-directory parent, None).
    """
    if not revs:
        return repo.dirstate.p1(), None

    matched = revrange(repo, revs)

    # pick the endpoints of the resolved smartset, preserving direction
    if not matched:
        first = second = None
    elif matched.isascending():
        first = matched.min()
        second = matched.max()
    elif matched.isdescending():
        first = matched.max()
        second = matched.min()
    else:
        first = matched.first()
        second = matched.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
466
467
def revrange(repo, specs, localalias=None):
    """Execute one or more revsets and return their union as a smartset.

    This is the preferred way to run user-supplied revsets because it
    honours user configuration such as revset aliases.

    The revsets in ``specs`` are chained together with ``OR``; an empty
    ``specs`` gives an empty result.  Integers in ``specs`` are treated
    as revision numbers.

    The revsets are assumed to be formatted already; expand any arguments
    with ``revsetlang.formatspec()`` before passing the result in.
    Supplying a single revset is allowed.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
494
495
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    Both parents of a merge (two non-null parents) are meaningful.
    Otherwise the single parent is meaningful only when it is not simply
    the immediately preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        return ps
    if repo.ui.debugflag:
        return [ps[0], repo['null']]
    only = ps[0]
    if only.rev() >= intrev(ctx) - 1:
        return []
    return ps
510
511
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume the shell has already done this for us.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns are passed through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # glob matched nothing: keep the original pattern
            expanded.append(kindpat)
    return expanded
529
530
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        # default handler warns via ui; note the late binding of ``m`` —
        # the callback only fires after the matcher below is constructed
        def badfn(f, msg):
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means the patterns were effectively empty
        pats = []
    return m, pats
554
555
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _discarded = matchandpats(ctx, pats, opts, globbed, default,
                                 badfn=badfn)
    return m
559
560
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
563
564
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
567
568
def origpath(ui, repo, filepath):
    '''Customize where .orig files are created.

    Fetch the user-defined path from the config file
    ([ui] origbackuppath = <path>); fall back to the default
    (filepath with a .orig suffix) if not specified.
    '''
    backuproot = ui.config('ui', 'origbackuppath')
    if backuproot is None:
        return filepath + ".orig"

    relfilepath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backuproot, relfilepath)

    # make sure the destination directory exists before handing the path back
    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath
587
588
588 class _containsnode(object):
589 class _containsnode(object):
589 """proxy __contains__(node) to container.__contains__ which accepts revs"""
590 """proxy __contains__(node) to container.__contains__ which accepts revs"""
590
591
591 def __init__(self, repo, revcontainer):
592 def __init__(self, repo, revcontainer):
592 self._torev = repo.changelog.rev
593 self._torev = repo.changelog.rev
593 self._revcontains = revcontainer.__contains__
594 self._revcontains = revcontainer.__contains__
594
595
595 def __contains__(self, node):
596 def __contains__(self, node):
596 return self._revcontains(self._torev(node))
597 return self._revcontains(self._torev(node))
597
598
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # caller-provided moves take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    # all bookmark updates and obsmarker/strip requests happen in a single
    # 'cleanup' transaction so they are applied (or rolled back) atomically
    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                # (name, None) deletes the divergent bookmark
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
686
687
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    # Add new/unknown files and forget missing ones, recursing into subrepos
    # when requested; detects renames when similarity > 0. Returns 1 if any
    # explicitly requested file was rejected (or a subrepo reported failure),
    # 0 otherwise.
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when explicitly requested, or when the matcher targets
        # the subrepo (exactly or via files underneath it)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn for files the user named explicitly; record all rejects
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly requested file that was rejected forces a failure status
    for f in rejected:
        if f in m.files():
            return 1
    return ret
742
743
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        addcandidates = set(unknown + forgotten)
        for abs in sorted(addcandidates | set(deleted)):
            if abs in addcandidates:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # signal failure if any requested file was rejected by the matcher
    for f in rejected:
        if f in m.files():
            return 1
    return 0
771
772
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # NOTE: the elif ordering below is significant — e.g. an unknown ('?')
    # file that fails the path audit falls through to the 'deleted' test.
    for abs, st in walkresults.iteritems():
        # dirstate state char: '?' untracked, 'r' removed, 'a' added, ...
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # not marked removed, yet absent from disk: deleted
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but still present on disk: forgotten
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
800
801
801 def _findrenames(repo, matcher, added, removed, similarity):
802 def _findrenames(repo, matcher, added, removed, similarity):
802 '''Find renames from removed files to added ones.'''
803 '''Find renames from removed files to added ones.'''
803 renames = {}
804 renames = {}
804 if similarity > 0:
805 if similarity > 0:
805 for old, new, score in similar.findrenames(repo, added, removed,
806 for old, new, score in similar.findrenames(repo, added, removed,
806 similarity):
807 similarity):
807 if (repo.ui.verbose or not matcher.exact(old)
808 if (repo.ui.verbose or not matcher.exact(old)
808 or not matcher.exact(new)):
809 or not matcher.exact(new)):
809 repo.ui.status(_('recording removal of %s as rename to %s '
810 repo.ui.status(_('recording removal of %s as rename to %s '
810 '(%d%% similar)\n') %
811 '(%d%% similar)\n') %
811 (matcher.rel(old), matcher.rel(new),
812 (matcher.rel(old), matcher.rel(new),
812 score * 100))
813 score * 100))
813 renames[new] = old
814 renames[new] = old
814 return renames
815 return renames
815
816
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    # the wlock guards all three dirstate mutations as one unit
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
825
826
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is not merged/normal: put it back into normal-lookup state
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # NOTE(review): in this branch only an add is recorded below,
            # no copy data — source is itself uncommitted
        if repo.dirstate[dst] in '?r' and not dryrun:
            # unknown or removed destination: (re-)add it
            wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
844
845
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    unknown = []
    for req in requirements:
        if req in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file is broken
        if not req or not req[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unknown.append(req)
    if unknown:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(unknown)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
863
864
def writerequires(opener, requirements):
    """Write the requirement names, sorted one per line, via *opener*."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write("%s\n" % name)
868
869
class filecachesubentry(object):
    """Tracks stat information for a single file backing a filecache."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True/False once known, None while still undetermined
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        # re-record stat info unless the file is known to be uncacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # optimistically assume cacheable while we don't know yet
        return True if self._cacheable is None else self._cacheable

    def changed(self):
        # an uncacheable file must always be treated as changed
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # cacheability may only become determinable once the file exists
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        return False

    @staticmethod
    def stat(path):
        # returns None when the file does not exist; other errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
923
924
class filecacheentry(object):
    """Aggregates one filecachesubentry per backing path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
940
941
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths of the files to watch; joined per-instance in join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator entry point: remember the wrapped function and its name
        # (bytes, to match _filecache keys — assumes ASCII function names)
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # backing file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1019
1020
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            # cwd=repo.root replaces the old chdir() dance, which mutated
            # process-global state and was not exception safe
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            # reap the child to avoid leaving a zombie process
            proc.communicate()
        if src:
            src.close()

    return data
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd as a subprocess that may inherit the given lock.

    The lock's inheritance token is exported to the child through the
    environment variable 'envvar'. Returns the subprocess exit code.
    Raises LockInheritanceContractViolation if the lock is not held.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta

    True when either format.generaldelta or format.usegeneraldelta is set.
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta'))
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised

    Only format.generaldelta is consulted (not usegeneraldelta).
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved pseudo-key under which a non key=value first line is returned
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # vfs: filesystem abstraction used for reading/writing
        # path: file path relative to the vfs root
        # keys: accepted for backward compatibility; currently unused
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes dict() raise ValueError
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp makes the write all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1172 _reportobsoletedsource = [
1174 _reportobsoletedsource = [
1173 'debugobsolete',
1175 'debugobsolete',
1174 'pull',
1176 'pull',
1175 'push',
1177 'push',
1176 'serve',
1178 'serve',
1177 'unbundle',
1179 'unbundle',
1178 ]
1180 ]
1179
1181
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Only transactions whose name starts with one of _reportobsoletedsource
    get a callback; all others are left untouched.
    """
    for source in _reportobsoletedsource:
        if txnname.startswith(source):
            # weakref avoids the closure keeping the repo alive
            reporef = weakref.ref(repo)
            def reportsummary(tr):
                """the actual callback reporting the summary"""
                repo = reporef()
                obsoleted = obsutil.getobsoleted(repo, tr)
                if obsoleted:
                    repo.ui.status(_('obsoleted %i changesets\n')
                                   % len(obsoleted))
            otr.addpostclose('00-txnreport', reportsummary)
            break
General Comments 0
You need to be logged in to leave comments. Login now