##// END OF EJS Templates
scmutil: add a simple key-value file helper...
Kostia Balytskyi -
r31553:56acc425 default
parent child Browse files
Show More
@@ -0,0 +1,72 b''
1 from __future__ import absolute_import
2
3 import unittest
4 import silenttestrunner
5
6 from mercurial import (
7 error,
8 scmutil,
9 )
10
class mockfile(object):
    """In-memory stand-in for a vfs file handle.

    Reads and writes go straight to the owning mock filesystem's
    ``contents`` dict, keyed by file name. Usable as a context manager
    so it can mimic ``with vfs(...) as fp:`` call sites.
    """

    def __init__(self, name, fs):
        self.name = name
        self.fs = fs

    def __enter__(self):
        return self

    def __exit__(self, *exc_info, **kwargs):
        # nothing to release; contents live in the backing dict
        pass

    def write(self, text):
        self.fs.contents[self.name] = text

    def read(self):
        return self.fs.contents[self.name]
class mockvfs(object):
    """Minimal vfs double: file contents live in a plain dict."""

    def __init__(self):
        self.contents = {}

    def read(self, path):
        return mockfile(path, self).read()

    def readlines(self, path):
        text = mockfile(path, self).read()
        return text.split('\n')

    def __call__(self, path, mode, atomictemp):
        # opening always succeeds; mode and atomictemp are ignored by the mock
        return mockfile(path, self)
class testsimplekeyvaluefile(unittest.TestCase):
    """Exercise scmutil.simplekeyvaluefile against an in-memory vfs."""

    def setUp(self):
        self.vfs = mockvfs()

    def testbasicwriting(self):
        data = {'key1': 'value1', 'Key2': 'value2'}
        scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(data)
        written = sorted(self.vfs.read('kvfile').split('\n'))
        # trailing newline on the last entry yields the leading '' here
        self.assertEqual(written, ['', 'Key2=value2', 'key1=value1'])

    def testinvalidkeys(self):
        data = {'0key1': 'value1', 'Key2': 'value2'}
        with self.assertRaisesRegexp(error.ProgrammingError,
                                     "keys must start with a letter.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(data)
        data = {'key1@': 'value1', 'Key2': 'value2'}
        with self.assertRaisesRegexp(error.ProgrammingError, "invalid key.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(data)

    def testinvalidvalues(self):
        data = {'key1': 'value1', 'Key2': 'value2\n'}
        with self.assertRaisesRegexp(error.ProgrammingError, "invalid val.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(data)

    def testcorruptedfile(self):
        self.vfs.contents['badfile'] = 'ababagalamaga\n'
        with self.assertRaisesRegexp(error.CorruptedState,
                                     "dictionary.*element.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'badfile').read()
71 if __name__ == "__main__":
72 silenttestrunner.main(__name__)
@@ -1,967 +1,1005 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16
16
17 from .i18n import _
17 from .i18n import _
18 from .node import wdirrev
18 from .node import wdirrev
19 from . import (
19 from . import (
20 encoding,
20 encoding,
21 error,
21 error,
22 match as matchmod,
22 match as matchmod,
23 osutil,
23 osutil,
24 pathutil,
24 pathutil,
25 phases,
25 phases,
26 pycompat,
26 pycompat,
27 revsetlang,
27 revsetlang,
28 similar,
28 similar,
29 util,
29 util,
30 vfs as vfsmod,
30 vfs as vfsmod,
31 )
31 )
32
32
# Pick the platform-specific scm helpers at import time; both modules
# expose the same attribute names under different implementations.
if pycompat.osname == 'nt':
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

# Re-export the platform entry points under stable module-level names.
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
termsize = scmplatform.termsize
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs, preferring ctx1's view of a subrepo
    when the path exists in both contexts.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # items() rather than the py2-only iteritems(): sorted() consumes the
    # pairs either way, so the result is identical and py3-portable.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        return func()
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        # NOTE: basestring/unicode below are py2-only builtins; this code
        # predates the py3 port of this module.
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.InterventionRequired as inst:
        # the only handler that asks the user to act, hence exit code 1
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        # presumably an urllib2-style HTTP error object — it carries 'code'
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = reason.encode(encoding.encoding, 'replace')
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe is expected when the reader goes away; stay silent
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            # unrecognized IOError shape: let it propagate
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is unusable as a new label (branch/bookmark/tag) name.

    Do not use the "kind" parameter in ui output.
    It makes strings difficult to translate.
    """
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a label that parses as an integer would shadow revision numbers
        raise error.Abort(_("cannot use an integer as a name"))
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # the check is unconditional on Windows; elsewhere 'abort' forces it
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    recognized = warn or abort or lval == 'ignore'
    if bval is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto a tracked one.

    Instantiated with the current dirstate; each call with a filename
    records it and complains if its case-folded form collides with an
    already-known file that is not itself in the dirstate.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        # when True, a collision raises error.Abort instead of warning
        self._abort = abort
        # join/split through '\0' folds all names in one encoding.lower call
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # a file already tracked under this exact name is not a collision
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        # remember this name so later calls can collide against it
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when nothing is filtered at or below maxrev.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # feed a bytes literal: identical on py2 (bytes is str) and
            # required on py3, where hashlib rejects text strings
            s.update(b'%d;' % rev)
        key = s.digest()
    return key
# compatibility layer since all 'vfs' code moved to 'mercurial.vfs'
#
# It is hard to install a deprecation warning on these since we do not
# have access to a 'ui' object.
opener = vfs = vfsmod.vfs
filteropener = filtervfs = vfsmod.filtervfs
abstractvfs = vfsmod.abstractvfs
readonlyvfs = vfsmod.readonlyvfs
auditvfs = vfsmod.auditvfs
checkambigatclosing = vfsmod.checkambigatclosing
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path are fatal; unreadable subdirs are
        # silently skipped by os.walk
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst and return True if it was not
            # seen before; stat-identity comparison defeats symlink cycles
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect symlink cycles, so refuse to
        # follow symlinks at all
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # sort for deterministic yield order
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the symlink target with its own recursion,
                        # sharing seen_dirs to avoid revisiting
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # in-place assignment so os.walk honors the pruned list
            dirs[:] = newdirs
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        # bundled default.d/*.rc snippets come first
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
# module-level cache for rcpath(); computed once per process
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in encoding.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if not os.path.isdir(p):
            _rcpath.append(p)
            continue
        for f, kind in osutil.listdir(p):
            if f.endswith('.rc'):
                _rcpath.append(os.path.join(p, f))
    return _rcpath
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None stands for the working directory; map it to the wdir sentinel rev
    return wdirrev if rev is None else rev
def revsingle(repo, revspec, default='.'):
    # no spec (but allow the literal revision 0) means the default revision
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
def _pairspec(revspec):
    # True when the top-level revset expression is a range form
    parsed = revsetlang.parse(revspec)
    if not parsed:
        return parsed
    return parsed[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
def revpair(repo, revs):
    """Resolve revs into a (node1, node2) pair; node2 is None for a
    single revision."""
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick the endpoints without forcing the smartset into a list
    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # bare integers are wrapped into an explicit rev() predicate
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True)
510
510
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a real merge: both parents matter
        return parents
    if repo.ui.debugflag:
        # debug mode always shows both slots, padding with the null revision
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx.rev()) - 1:
        # linear history: the parent is implied by position, report nothing
        return []
    return parents
526
526
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kinds (glob:, re:, ...) are passed through
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        # an unmatched bare pattern is kept verbatim
        expanded.extend(matches if matches else [kindpat])
    return expanded
545
545
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs where the shell did not do it (i.e. Windows)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # NOTE: closes over 'm', which is assigned below; ctx.match() only
        # stores the callback, so 'm' exists before this can ever run
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means the patterns were effectively empty
        pats = []
    return m, pats
570
570
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), discarding the normalized pattern list
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default,
                                  badfn=badfn)
    return matcher
575
575
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
579
579
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
583
583
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        # default: backup sits next to the file itself
        return filepath + ".orig"

    # mirror the repo-relative layout under the configured backup root
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, relpath)

    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath + ".orig"
603
603
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and remove missing ones, recursing into subrepos.

    Returns 1 if anything failed (a subrepo failed or a named pattern
    matched nothing), 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    # recurse into subrepos the matcher reaches (explicitly, via --subrepos,
    # or because some pattern narrows into the subrepo)
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher flagged as bad; failures are reported after
    # the walk so all other work still happens
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # announce what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # a bad file that was explicitly named is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
659
659
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # NOTE: the lambda closes over 'rejected' before it is assigned; that is
    # fine because badfn only runs later, during _interestingfiles()
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # report failure if any explicitly named file was bad
    for f in rejected:
        if f in m.files():
            return 1
    return 0
688
688
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # path auditor filters out paths that escape the repo or hit .hg
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # st is the stat result, falsy when the file is missing on disk;
    # dirstate chars: '?' untracked, 'r' removed, 'a' added
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but still present: it was forgotten, not deleted
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
717
717
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        # rename detection disabled
        return renames
    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        bothexact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not bothexact:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
732
732
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    # all dirstate mutations happen under a single wlock
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for newname, oldname in renames.iteritems():
            workingctx.copy(oldname, newname)
742
742
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chained copies point at the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # dirstate chars 'm' (merged) / 'n' (normal); anything else needs a
        # normallookup so the next status re-examines the file
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only just added: no committed revision exists to
            # attach copy metadata to, so just add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
761
761
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    unsupported = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file itself
        # is damaged, not merely that a feature is unknown
        if not requirement or not requirement[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unsupported.append(requirement)
    unsupported.sort()
    if unsupported:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(unsupported),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
780
780
def writerequires(opener, requirements):
    """Write the requirements, one per line and sorted, to .hg/requires."""
    lines = ["%s\n" % requirement for requirement in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
785
785
class filecachesubentry(object):
    """Tracks one on-disk path for filecache invalidation.

    Records the path's stat info and answers whether it changed since the
    last refresh. Cacheability is lazily determined from the stat result.
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # record the current stat as the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the path's stat info differs from the baseline
        (or if the path cannot be cached reliably at all)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            # update the baseline as a side effect of detecting the change
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
840
840
class filecacheentry(object):
    """Aggregate of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
857
857
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator application: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # (obj.__dict__ holds the fast-path cached value; since this is a
        # non-data-descriptor-style fast path, the invariant is that a value
        # in __dict__ implies an entry in _filecache)
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # stale: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # only the fast-path value is dropped; the _filecache entry remains
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
936
936
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    # Run 'cmd' as a subprocess that may inherit 'lock' through the
    # environment variable 'envvar'; returns the subprocess exit code.
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        # expose the lock token so the child process can take over the lock
        # (note: intentionally mutates the caller-supplied environ dict)
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
946
946
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
955
955
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta', False):
        return True
    return ui.configbool('format', 'usegeneraldelta', True)
962
962
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta', False)
    return enabled
968
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but currently unused
        self.vfs = vfs
        self.path = path

    def read(self):
        """Parse the file into a dict.

        Raises error.CorruptedState when a non-empty line has no '=' in it.
        """
        lines = self.vfs.readlines(self.path)
        try:
            # Strip at most one trailing newline rather than blindly chopping
            # the last character with line[:-1]: not every vfs readlines()
            # keeps the '\n', and values cannot contain '\n' (enforced by
            # write()), so rstrip is always safe and never over-strips.
            d = dict(line.rstrip('\n').split('=', 1) for line in lines if line)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters."""
        lines = []
        for k, v in data.items():
            if not k[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp ensures readers never observe a partially written file
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1005
General Comments 0
You need to be logged in to leave comments. Login now