##// END OF EJS Templates
extdata: add extdatasource reader...
Matt Mackall -
r34457:7757cc48 default
parent child Browse files
Show More
@@ -1,1142 +1,1193
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 short,
22 short,
23 wdirid,
23 wdirid,
24 wdirrev,
24 wdirrev,
25 )
25 )
26
26
27 from . import (
27 from . import (
28 encoding,
28 encoding,
29 error,
29 error,
30 match as matchmod,
30 match as matchmod,
31 obsolete,
31 obsolete,
32 obsutil,
32 obsutil,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 revsetlang,
36 revsetlang,
37 similar,
37 similar,
38 url,
38 util,
39 util,
39 )
40 )
40
41
41 if pycompat.osname == 'nt':
42 if pycompat.osname == 'nt':
42 from . import scmwindows as scmplatform
43 from . import scmwindows as scmplatform
43 else:
44 else:
44 from . import scmposix as scmplatform
45 from . import scmposix as scmplatform
45
46
46 termsize = scmplatform.termsize
47 termsize = scmplatform.termsize
47
48
class status(tuple):
    '''Immutable 7-tuple grouping file names by dirstate status.

    Slots are (modified, added, removed, deleted, unknown, ignored,
    clean); the 'deleted', 'unknown' and 'ignored' groups only carry
    meaning for the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        groups = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, groups)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files still in the dirstate but deleted from the working copy
        (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
100
101
def itersubrepos(ctx1, ctx2):
    """Yield (subpath, subrepo) for every subrepo in ctx1 or ctx2."""
    # Map subpath -> providing context, preferring ctx1: a .hgsub edit
    # may exist in ctx2 (e.g. working copy) without being committed in
    # ctx1 yet.
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subrepo paths present only in ctx2
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # For ctx2-only subrepos, yield an empty subrepo based on ctx1 so
    # that 'sub.{status|diff}(rev2)' has something real to compare with
    # instead of comparing the ctx2 subrepo against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
125
126
def nochangesfound(ui, repo, excluded=None):
    '''Tell the user that push/pull found nothing to exchange.

    excluded, when given, is a list of nodes deliberately left out of
    the operation; secret changesets among them are counted and
    mentioned in the message.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # extinct secret changesets are not worth mentioning
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        msg = (_("no changes found (ignored %d secret changesets)\n")
               % len(secretlist))
    else:
        msg = _("no changes found\n")
    ui.status(msg)
142
143
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Return codes: func()'s own result on success, 1 for
    InterventionRequired, the caught SystemExit's code, and -1 for every
    other handled exception.
    """
    try:
        try:
            return func()
        except: # re-raises
            # print the traceback (if --traceback is set) before the
            # outer handlers reduce the exception to a message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename,
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # args[1] may not be printable as-is; show a repr or note emptiness
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # user action required, not an internal failure: exit code 1
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        # last word of the message is the module that failed to import
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # an HTTPError duck-types IOError and carries a "code" attribute
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe (e.g. output piped to a pager that quit): stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror), inst.filename))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            # unrecognized IOError shape: let it propagate
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror), inst.filename))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
253
254
def checknewlabel(repo, lbl, kind):
    # "kind" is deliberately absent from the messages below: embedding
    # it would make the strings hard to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
267
268
def checkfilename(f):
    '''Reject filename f if it cannot be a tracked file (embedded newlines).'''
    if '\n' in f or '\r' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
272
273
def checkportable(ui, f):
    '''Warn or abort, per the user's config, when filename f is not portable.'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
284
285
def checkportabilityalert(ui):
    '''Map the ui.portablefilenames setting to an (abort, warn) pair.

    On Windows the abort flag is always set; an unrecognized setting
    raises ConfigError.
    '''
    raw = ui.config('ui', 'portablefilenames')
    lowered = raw.lower()
    asbool = util.parsebool(raw)
    abort = pycompat.osname == 'nt' or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
297
298
class casecollisionauditor(object):
    '''Report additions that collide case-insensitively with tracked files.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # Names already audited through this object; checking the same
        # filename twice must not trigger a spurious collision report.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
321
322
def filteredhash(repo, maxrev):
    """Return a SHA-1 digest of the revs hidden by the current repoview.

    Checking cached tiprev/tipnode alone cannot validate a cache against
    a repoview: the set of visible revisions may change while the
    repository tip stays put.  Hashing every filtered rev up to maxrev
    captures that extra state.  Returns None when nothing relevant is
    filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
345
346
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    # abort the walk only when the error hits the root itself; failures
    # deeper in the tree are silently skipped by os.walk
    def errhandler(err):
        if err.filename == path:
            raise err
    # samestat is required for symlink-cycle detection and is missing on
    # some platforms; without it symlinks are simply not followed
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (recording dirname's stat) if this directory's
            # identity has not been seen yet; False means already visited
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # sort for deterministic yield order
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target explicitly, sharing
                        # seen_dirs so cycles terminate
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
393
394
def binnode(ctx):
    """Binary node id of ctx, substituting wdirid for the working directory."""
    node = ctx.node()
    return wdirid if node is None else node
400
401
def intrev(ctx):
    """Integer revision of ctx usable in comparison or arithmetic,
    substituting wdirrev for the working directory."""
    rev = ctx.rev()
    return wdirrev if rev is None else rev
408
409
def formatchangeid(ctx):
    """Render ctx as '{rev}:{node|formatnode}' — the default template of
    cmdutil.changeset_templater."""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
414
415
def formatrevnode(ui, rev, node):
    """Render 'rev:node', choosing node width by the current verbosity.

    Debug output shows the full hex node; otherwise the short form.
    """
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
422
423
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single changectx, falling back to default.

    Revision 0 is honored even though it is falsy; only a truly empty
    spec selects the default.
    """
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec], localalias=localalias)
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
431
432
def _pairspec(revspec):
    # True when the spec's top-level operator is a range form (a:b, :b,
    # a:, or :), which must always resolve to a pair
    tree = revsetlang.parse(revspec)
    rangeops = ('range', 'rangepre', 'rangepost', 'rangeall')
    return tree and tree[0] in rangeops
435
436
def revpair(repo, revs):
    """Resolve a list of revision specs to a (first, second) node pair.

    With no specs, the dirstate's first parent and None are returned.
    second is None whenever the specs collapse to a single revision that
    was not written as a range expression.
    """
    if not revs:
        return repo.dirstate.p1(), None

    revset = revrange(repo, revs)

    if not revset:
        first = second = None
    elif revset.isascending():
        first, second = revset.min(), revset.max()
    elif revset.isdescending():
        first, second = revset.max(), revset.min()
    else:
        first, second = revset.first(), revset.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # a top-level range expression must always produce a real pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
465
466
def revrange(repo, specs, localalias=None):
    """Execute one or more revsets and return their union.

    This is the preferred way to run user-supplied revsets, since it
    honors per-user configuration such as revset aliases.

    The elements of ``specs`` are combined with a chained ``OR``; an
    empty ``specs`` yields an empty result.  Integer elements are
    treated as revision numbers.

    Revsets are assumed to be formatted already; expand any arguments
    with ``revsetlang.formatspec()`` first and pass the result here.
    A single revset is fine.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
493
494
def meaningfulparents(repo, ctx):
    """Return the parents of ctx worth displaying (all of them in debug).

    Both parents of a merge are always meaningful.  A lone parent is
    meaningful only when it is not simply the preceding revision.
    """
    ps = ctx.parents()
    if len(ps) > 1:
        return ps
    if repo.ui.debugflag:
        return [ps[0], repo['null']]
    if ps[0].rev() >= intrev(ctx) - 1:
        return []
    return ps
509
510
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix this has already been done by the shell.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind: pass through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # no match: keep the literal pattern
            expanded.append(kindpat)
    return expanded
528
529
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a single empty pattern is treated the same as no patterns at all
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # on windows the shell does not expand globs; do it ourselves
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: 'm' is bound below, after this closure is created; the
        # lookup is deferred to call time, so this is safe
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # an always-matcher carries no interesting patterns for callers
    if m.always():
        pats = []
    return m, pats
553
554
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), discarding the expanded pattern list
    matcher, _unusedpats = matchandpats(ctx, pats, opts, globbed, default,
                                        badfn=badfn)
    return matcher
558
559
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # always-matcher rooted at the repo root and current working directory
    return matchmod.always(repo.root, repo.getcwd())
562
563
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matcher: no pattern expansion; badfn, when given, is forwarded
    # as the matcher's bad-file callback
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
566
567
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path at which the backup should be written.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if origbackuppath is None:
        # default behavior: sibling file with a .orig suffix
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured backup dir
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    # make sure the target directory exists before the caller writes to it
    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath
586
587
587 class _containsnode(object):
588 class _containsnode(object):
588 """proxy __contains__(node) to container.__contains__ which accepts revs"""
589 """proxy __contains__(node) to container.__contains__ which accepts revs"""
589
590
590 def __init__(self, repo, revcontainer):
591 def __init__(self, repo, revcontainer):
591 self._torev = repo.changelog.rev
592 self._torev = repo.changelog.rev
592 self._revcontains = revcontainer.__contains__
593 self._revcontains = revcontainer.__contains__
593
594
594 def __contains__(self, node):
595 def __contains__(self, node):
595 return self._revcontains(self._torev(node))
596 return self._revcontains(self._torev(node))
596
597
def cleanupnodes(repo, replacements, operation, moves=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms: a bare iterable means "replaced by
    # nothing", i.e. every node maps to an empty successor tuple
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # caller-supplied moves take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards: to the nearest surviving ancestor
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # None means "delete this bookmark"
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation)
        else:
            # obsmarkers disabled: physically strip the replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
685
686
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    recursing into subrepos when requested; detected renames are recorded
    as copies.

    Returns 1 if any file explicitly named by the matcher was rejected (or
    a subrepo reported failure), 0 otherwise.
    '''
    if opts is None:
        opts = {}
    m = matcher
    # explicit arguments win over the corresponding command-line options
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when -S was given, the subrepo itself was named, or the
        # matcher selects files inside it
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher rejects; only explicitly listed ones count
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report the files about to be added (unknown/forgotten) or removed
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # any explicitly named rejected file makes the whole command fail
    for f in rejected:
        if f in m.files():
            return 1
    return ret
741
742
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the lambda refers to 'rejected' before it is bound below; this
    # works because closures resolve names lazily, at call time
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # echo what will be added (unknown/forgotten) or removed
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file that was explicitly requested means failure
    for f in rejected:
        if f in m.files():
            return 1
    return 0
770
771
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists: (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # st is the stat result (falsy when the file is absent from disk);
    # dstate is the dirstate code: '?' untracked, 'r' removed, 'a' added
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and passes the path audit -> candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing from disk -> candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
799
800
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping of detected renames.
    '''
    renames = {}
    # similarity <= 0 disables rename detection entirely
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # stay quiet about files the user named explicitly, unless verbose
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
814
815
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the working-copy lock while mutating state below
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps new name -> old name
        for new, old in renames.iteritems():
            wctx.copy(old, new)
824
825
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its true origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only just added: there is no committed data
            # to link the copy to, so only add dst (with a warning)
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
843
844
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read from the file; aborts with
    RequirementError when the file is corrupt or lists unknown features.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for req in requirements:
        if req in supported:
            continue
        # entries must start with an alphanumeric char; anything else
        # indicates a damaged requires file
        if not req or not req[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(req)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
862
863
def writerequires(opener, requirements):
    """Write the requirements, sorted one per line, to '.hg/requires'."""
    with opener('requires', 'w') as fobj:
        for name in sorted(requirements):
            fobj.write("%s\n" % name)
867
868
class filecachesubentry(object):
    """Stat-based change tracker for a single file path.

    _cacheable is a tri-state: True/False once known, None until the first
    successful stat tells us whether the filesystem supports caching.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        # 'stat' flag: record stat info now (True) or defer (False)
        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info; pointless when the path is not cacheable
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        """Stat 'path', returning None (implicitly) when it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            # a missing file is expected; propagate every other error
            if e.errno != errno.ENOENT:
                raise
922
923
class filecacheentry(object):
    """Aggregate tracker: one filecachesubentry per watched path."""

    def __init__(self, paths, stat=True):
        # build a sub-entry per path, optionally recording stat info now
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits exactly like the original early return
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        # re-stat every watched file
        for sub in self._entries:
            sub.refresh()
939
940
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and key both
        # obj.__dict__ and obj._filecache by its (byte-string) name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: value already cached on the instance
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only when one of the tracked files changed on disk
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            # mirror normal attribute semantics: deleting a missing
            # attribute raises AttributeError, not KeyError
            raise AttributeError(self.name)
1018
1019
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    if spec.startswith("shell:"):
        # external commands should be run relative to the repo root
        cmd = spec[6:]
        prevcwd = os.getcwd()
        os.chdir(repo.root)
        try:
            src = util.popen(cmd)
        finally:
            os.chdir(prevcwd)
    else:
        # treat as a URL or file
        src = url.open(repo.ui, spec)

    data = {}
    try:
        for record in src.readlines():
            # "<revspec>[ <value>]\n" -> key/value pair
            if " " in record:
                k, v = record.strip().split(" ", 1)
            else:
                k, v = record.strip(), ""

            k = encoding.tolocal(k)
            if k in repo:
                # we ignore data for nodes that don't exist locally
                data[repo[k].rev()] = encoding.tolocal(v)
    finally:
        src.close()

    return data
1069
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run *cmd* via ui.system with the lock's inheritance token exported.

    The lock must currently be held; the token stored under *envvar* lets
    the subprocess inherit it. Returns the subprocess exit code.
    """
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    # alias (not copy) so a caller-supplied mapping keeps seeing the token
    env = {} if environ is None else environ
    with lock.inherit() as locker:
        env[envvar] = locker
        return repo.ui.system(cmd, environ=env, *args, **kwargs)
1028
1079
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd,
                    *args, **kwargs)
1037
1088
def gdinitconfig(ui):
    """Report whether a new repo should be created using general delta.

    True when either format knob is switched on.
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1044
1095
def gddeltaconfig(ui):
    """Report whether incoming deltas should be optimised for general delta."""
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1050
1101
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved key used to carry an opaque first line through read()
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        contents = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not contents:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # keep the first line verbatim, minus its trailing '\n'
            result[self.firstlinekey] = contents[0][:-1]
            del contents[0]

        try:
            # 'if line.strip()' skips lines that hold only a '\n' (plain
            # 'if line' would not skip them); a line without '=' makes the
            # dict() constructor raise ValueError, mapped below
            parsed = dict(line[:-1].split('=', 1) for line in contents
                          if line.strip())
            if self.firstlinekey in parsed:
                msg = _("%r can't be used as a key")
                raise error.CorruptedState(msg % self.firstlinekey)
            result.update(parsed)
        except ValueError as inst:
            raise error.CorruptedState(str(inst))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        out = []
        if firstline is not None:
            out.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            out.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(out))
1119
1170
1120 _reportobsoletedsource = [
1171 _reportobsoletedsource = [
1121 'debugobsolete',
1172 'debugobsolete',
1122 'pull',
1173 'pull',
1123 'push',
1174 'push',
1124 'serve',
1175 'serve',
1125 'unbundle',
1176 'unbundle',
1126 ]
1177 ]
1127
1178
1128 def registersummarycallback(repo, otr, txnname=''):
1179 def registersummarycallback(repo, otr, txnname=''):
1129 """register a callback to issue a summary after the transaction is closed
1180 """register a callback to issue a summary after the transaction is closed
1130 """
1181 """
1131 for source in _reportobsoletedsource:
1182 for source in _reportobsoletedsource:
1132 if txnname.startswith(source):
1183 if txnname.startswith(source):
1133 reporef = weakref.ref(repo)
1184 reporef = weakref.ref(repo)
1134 def reportsummary(tr):
1185 def reportsummary(tr):
1135 """the actual callback reporting the summary"""
1186 """the actual callback reporting the summary"""
1136 repo = reporef()
1187 repo = reporef()
1137 obsoleted = obsutil.getobsoleted(repo, tr)
1188 obsoleted = obsutil.getobsoleted(repo, tr)
1138 if obsoleted:
1189 if obsoleted:
1139 repo.ui.status(_('obsoleted %i changesets\n')
1190 repo.ui.status(_('obsoleted %i changesets\n')
1140 % len(obsoleted))
1191 % len(obsoleted))
1141 otr.addpostclose('00-txnreport', reportsummary)
1192 otr.addpostclose('00-txnreport', reportsummary)
1142 break
1193 break
General Comments 0
You need to be logged in to leave comments. Login now