# vfs: deprecate all old vfs classes re-exported from scmutil
# (changeset r31951:f23d579a, default branch, by Pierre-Yves David)
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket

from .i18n import _
from .node import wdirrev
from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    util,
    vfs as vfsmod,
)

# Pick the platform-specific helper module (termsize etc.): Windows gets
# scmwindows, everything else gets scmposix.
if pycompat.osname == 'nt':
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    # no per-instance __dict__: instances are plain 7-tuples
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
91
91
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs, sorted by subpath, covering the union
    of the subrepo paths of both contexts.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # .items() (not the Python 2-only .iteritems()) so this works on both
    # Python 2 and 3; sorted() materializes the pairs either way.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
116
116
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.

    Secret (or higher-phase) non-extinct changesets among the excluded
    nodes are counted and mentioned in the status message.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            if n not in repo:
                # discovery should not have included the filtered revision,
                # we have to explicitly exclude it until discovery is cleanup.
                continue
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
137
137
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        return func()
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # py2 basestring: bytes and unicode both count as "a string" here
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.InterventionRequired as inst:
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        # InterventionRequired is the one handled case with a non -1 code
        return 1
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = reason.encode(encoding.encoding, 'replace')
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
239
239
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not acceptable as a new label (bookmark/branch/tag).

    Rejects reserved names, control/separator characters, and bare integers
    (which would be ambiguous with revision numbers).
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        # not an integer: the name is fine on this count
        pass
253
253
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newlines would break dirstate/manifest serialization
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
258
258
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    # hard requirement first: no newlines ever
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
270
270
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans. Raises ConfigError for an
    unrecognized ui.portablefilenames value.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows non-portable names can't be created at all, so always abort
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
283
283
class casecollisionauditor(object):
    '''Warn about (or abort on) new filenames that differ only in case from
    a file already present in the dirstate.

    Call the instance with each candidate filename; depending on the
    ``abort`` flag it raises error.Abort or emits a warning on collision.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked name once, up front, for O(1) lookups
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
307
307
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.  Returns None when nothing is filtered at all, and
    only considers revisions up to and including ``maxrev``.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
331
331
def _deprecated(old, new, func):
    """Wrap ``func`` so that every call emits a deprecation warning.

    ``old`` is the legacy name under mercurial.scmutil, ``new`` the name of
    the replacement in mercurial.vfs.  Returns the wrapper.
    """
    msg = ('class at mercurial.scmutil.%s moved to mercurial.vfs.%s'
           % (old, new))
    def wrapper(*args, **kwargs):
        # '4.2' is the release in which these aliases became deprecated
        util.nouideprecwarn(msg, '4.2')
        return func(*args, **kwargs)
    return wrapper

# compatibility layer since all 'vfs' code moved to 'mercurial.vfs'
#
# It is hard to install a deprecation warning on these directly since we do
# not have access to a 'ui' object.
opener = _deprecated('opener', 'vfs', vfsmod.vfs)
vfs = _deprecated('vfs', 'vfs', vfsmod.vfs)
filteropener = _deprecated('filteropener', 'filtervfs', vfsmod.filtervfs)
filtervfs = _deprecated('filtervfs', 'filtervfs', vfsmod.filtervfs)
abstractvfs = _deprecated('abstractvfs', 'abstractvfs', vfsmod.abstractvfs)
readonlyvfs = _deprecated('readonlyvfs', 'readonlyvfs', vfsmod.readonlyvfs)
auditvfs = _deprecated('auditvfs', 'auditvfs', vfsmod.auditvfs)
checkambigatclosing = vfsmod.checkambigatclosing
342
352
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate errors for the root we were asked to walk
        if err.filename == path:
            raise err
    # samestat is not available on all platforms (py2); without it we cannot
    # detect symlink cycles, so symlink following is disabled below
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and record the stat) only if dirname is new
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
390
400
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation

    A rev of None (the working directory) maps to the large sentinel wdirrev.
    """
    if rev is None:
        return wdirrev
    return rev
397
407
def revsingle(repo, revspec, default='.'):
    """Resolve ``revspec`` to a single changectx, falling back to ``default``
    when the spec is empty (but 0 is a valid revision, not "empty")."""
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise error.Abort(_('empty revision set'))
    # the last revision of the set is the conventional "single" answer
    return repo[l.last()]
406
416
def _pairspec(revspec):
    """Return True if ``revspec`` parses to a top-level range expression
    (e.g. 'a:b', ':b', 'a:', ':'), which must always resolve to a pair."""
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
410
420
def revpair(repo, revs):
    """Resolve user-supplied revision specs ``revs`` to a (first, second)
    pair of nodes.  With no specs, returns (p1 of the working dir, None);
    a single non-range spec yields (node, None)."""
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # endpoints can be picked cheaply when the set knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
440
450
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            # bare revnums become explicit rev() expressions
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True)
468
478
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        # in debug mode always show p1 plus an explicit null parent
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx.rev()) - 1:
        return []
    return parents
484
494
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid glob pattern: keep it verbatim
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # kinded patterns (glob:, re:, ...) and non-matching globs pass through
        ret.append(kindpat)
    return ret
503
513
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        # a lone empty pattern means "no patterns at all"
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (a no-op on posix where the shell already did)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is the matcher assigned below; the closure is only invoked
        # after ctx.match() has returned, so the forward reference is safe
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # matcher matches everything: report no patterns back to the caller
        pats = []
    return m, pats
528
538
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), but discards the normalized pattern list
    matcher, _unused = matchandpats(ctx, pats, opts, globbed, default,
                                    badfn=badfn)
    return matcher
533
543
def matchall(repo):
    '''Return a matcher that will efficiently match everything.

    The matcher is rooted at the repository root, relative to the current
    working directory.'''
    return matchmod.always(repo.root, repo.getcwd())
537
547
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.

    'files' are taken literally (no pattern syntax); 'badfn' is passed
    through to the matcher as its bad-file callback.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
541
551
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backupdir = ui.config('ui', 'origbackuppath', None)
    if backupdir is None:
        # no override configured: back up next to the original file
        return filepath + ".orig"

    # mirror the file's repo-relative path under the backup directory
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backupdir, relpath)

    parent = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(parent):
        ui.note(_('creating directory: %s\n') % parent)
        util.makedirs(parent)

    return fullorigpath + ".orig"
561
571
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule adding new files and removing missing ones, with renames.

    'prefix' is the path of this (sub)repository relative to the top-level
    repository, used for reporting. When 'dry_run' is true, nothing is
    recorded in the dirstate. 'similarity' controls rename detection via
    _findrenames(). Recurses into subrepositories. Returns 1 if any
    explicitly listed file was rejected, else 0/1 from the subrepo walk.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        # recurse when asked for explicitly, or when the matcher reaches
        # into this subrepo
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # warn only for files the user named explicitly, but remember
        # every failure so the exit code can reflect it below
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
617
627
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # define the rejection list before the lambda that closes over it
    rejected = []
    m = matchfiles(repo, files, badfn=lambda f, msg: rejected.append(f))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset | set(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
646
656
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists of repo-relative paths:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # dirstate states used below: '?' unknown, 'r' removed, 'a' added;
    # 'st' is falsy when the file is missing from disk (see 'not st')
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
675
685
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new name -> old name for every detected
    rename; empty when similarity is zero or below.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        bothexact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not bothexact:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
690
700
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all three dirstate mutations happen under a single wlock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
700
710
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is neither modified nor normal: resurrect it in dirstate
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only just added: there is no committed
            # revision to record copy data against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
719
729
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings read from the file. Raises
    RequirementError when the file is corrupt or lists features unknown
    to this Mercurial.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a valid requirement is a non-empty token starting alphanumerically
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
738
748
def writerequires(opener, requirements):
    """Write the given requirements, sorted one per line, to .hg/requires."""
    content = ''.join('%s\n' % r for r in sorted(requirements))
    with opener('requires', 'w') as fp:
        fp.write(content)
743
753
class filecachesubentry(object):
    """Tracks stat information for a single file path.

    'cachestat' holds the last observed util.cachestat (or None when the
    file is absent or not yet statted); '_cacheable' is a tri-state flag:
    True/False once known, None while still undetermined."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        """Re-stat the path, but only when it is worth caching."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is None:
            # not known yet: optimistically assume the path is cacheable
            return True
        return self._cacheable

    def changed(self):
        """Return True when the file appears to have changed on disk."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # cacheability may have been unknown; settle it from the fresh stat
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # a missing file yields None; other stat errors propagate
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
798
808
class filecacheentry(object):
    """A group of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # short-circuits on the first changed entry, like the manual loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
815
825
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths; join() below turns them into runtime paths
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            # match normal attribute semantics for deleting a missing value
            raise AttributeError(self.name)
894
904
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run 'cmd' via ui.system, exporting the inherited lock token.

    The locker token from 'lock' is placed into the (possibly
    caller-supplied) 'environ' mapping under 'envvar' so the child
    process can take over the lock. Returns the subprocess exit code."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as token:
        environ[envvar] = token
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
904
914
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
913
923
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta', False):
        return True
    return ui.configbool('format', 'usegeneraldelta', True)
920
930
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    generaldelta = ui.configbool('format', 'generaldelta', False)
    return generaldelta
926
936
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path
        # retained for introspection by callers/subclasses; previously the
        # argument was silently discarded
        self.keys = keys

    def read(self):
        """Parse the file and return its contents as a dict.

        Raises CorruptedState when a line cannot be split into key=value."""
        lines = self.vfs.readlines(self.path)
        try:
            # strip only the trailing newline: using line[:-1] here would
            # eat the last character of a final line without a newline
            d = dict(line.rstrip('\n').split('=', 1) for line in lines if line)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters."""
        lines = []
        for k, v in data.items():
            # guard against empty keys too: k[0] alone would raise IndexError
            if not k or not k[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp makes the on-disk update all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
963
General Comments 0
You need to be logged in to leave comments. Login now