##// END OF EJS Templates
scmutil: add a simple key-value file helper...
Kostia Balytskyi -
r31553:56acc425 default
parent child Browse files
Show More
@@ -0,0 +1,72 b''
1 from __future__ import absolute_import
2
3 import unittest
4 import silenttestrunner
5
6 from mercurial import (
7 error,
8 scmutil,
9 )
10
class mockfile(object):
    """In-memory stand-in for a vfs file handle.

    All reads and writes go through the ``contents`` dict of the owning
    mock filesystem, keyed by file name. Also usable as a no-op context
    manager so ``with vfs(...) as fp:`` works in tests.
    """

    def __init__(self, name, fs):
        self.name = name
        self.fs = fs

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        # nothing to release; the "file" lives in a dict
        pass

    def write(self, text):
        # overwrite the whole file content in one shot
        self.fs.contents[self.name] = text

    def read(self):
        return self.fs.contents[self.name]
27
class mockvfs(object):
    """Minimal vfs replacement backed by a plain dict of path -> text."""

    def __init__(self):
        self.contents = {}

    def read(self, path):
        return mockfile(path, self).read()

    def readlines(self, path):
        # NOTE(review): splits on '\n' and drops the separators; a real
        # vfs readlines keeps line endings -- confirm callers don't rely
        # on the difference.
        return mockfile(path, self).read().split('\n')

    def __call__(self, path, mode, atomictemp):
        # mimic vfs.__call__ returning an open file-like object; mode and
        # atomictemp are accepted but ignored by the mock
        return mockfile(path, self)
40
class testsimplekeyvaluefile(unittest.TestCase):
    """Unit tests for scmutil.simplekeyvaluefile using an in-memory vfs."""

    def setUp(self):
        self.vfs = mockvfs()

    def testbasicwriting(self):
        # a written dict round-trips as sorted 'key=value' lines plus the
        # trailing empty string produced by the final newline
        d = {'key1': 'value1', 'Key2': 'value2'}
        scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
        self.assertEqual(sorted(self.vfs.read('kvfile').split('\n')),
                         ['', 'Key2=value2', 'key1=value1'])

    def testinvalidkeys(self):
        # keys must start with a letter ...
        d = {'0key1': 'value1', 'Key2': 'value2'}
        with self.assertRaisesRegexp(error.ProgrammingError,
                                     "keys must start with a letter.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)
        # ... and may only contain alphanumerics afterwards
        d = {'key1@': 'value1', 'Key2': 'value2'}
        with self.assertRaisesRegexp(error.ProgrammingError, "invalid key.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)

    def testinvalidvalues(self):
        # embedded newlines would corrupt the line-oriented format
        d = {'key1': 'value1', 'Key2': 'value2\n'}
        with self.assertRaisesRegexp(error.ProgrammingError, "invalid val.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'kvfile').write(d)

    def testcorruptedfile(self):
        # a line without '=' must surface as CorruptedState on read
        self.vfs.contents['badfile'] = 'ababagalamaga\n'
        with self.assertRaisesRegexp(error.CorruptedState,
                                     "dictionary.*element.*"):
            scmutil.simplekeyvaluefile(self.vfs, 'badfile').read()

if __name__ == "__main__":
    silenttestrunner.main(__name__)
@@ -1,967 +1,1005 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16
17 17 from .i18n import _
18 18 from .node import wdirrev
19 19 from . import (
20 20 encoding,
21 21 error,
22 22 match as matchmod,
23 23 osutil,
24 24 pathutil,
25 25 phases,
26 26 pycompat,
27 27 revsetlang,
28 28 similar,
29 29 util,
30 30 vfs as vfsmod,
31 31 )
32 32
33 33 if pycompat.osname == 'nt':
34 34 from . import scmwindows as scmplatform
35 35 else:
36 36 from . import scmposix as scmplatform
37 37
38 38 systemrcpath = scmplatform.systemrcpath
39 39 userrcpath = scmplatform.userrcpath
40 40 termsize = scmplatform.termsize
41 41
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # fixed 7-slot tuple; property accessors below give the slots names
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
94 94
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath -> ctx) mapping preferring ctx1: subpaths from ctx2
    # matter when .hgsub was modified (in ctx2) but not yet committed (ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths present only in ctx2 are handled separately below
    missing = set()
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way status/diff get an accurate result for 'sub.{status|diff}(rev2)'
    # instead of comparing the ctx2 subrepo against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
119 119
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            if n not in repo:
                # discovery should not have included the filtered revision;
                # explicitly skip it until discovery is cleaned up
                continue
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    # mention secret changesets so the user understands why nothing moved
    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
140 140
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        return func()
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.InterventionRequired as inst:
        # the only handled case that exits with 1 instead of -1
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-style object
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try:  # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = reason.encode(encoding.encoding, 'replace')
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe: stay silent, the reader went away
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])

    return -1
242 242
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    # reserved revision identifiers cannot be reused as labels
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    # characters that would break the dirstate/revset syntax
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        # not an integer: acceptable
        pass
256 256
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newlines would corrupt the line-oriented dirstate/manifest formats
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
261 261
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        # only pay for the Windows-compatibility check when it matters
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
273 273
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows non-portable names always abort; elsewhere honor the config
    abort = pycompat.osname == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
286 286
class casecollisionauditor(object):
    """Warn or abort when a new file would case-fold-collide with a
    tracked one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # join/split through '\0' to lowercase all tracked names in one call
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
310 310
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        # unfiltered view: no key needed
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
334 334
# Compatibility layer: the 'vfs' classes moved to 'mercurial.vfs'.
# A deprecation warning cannot be issued here since no 'ui' object is
# available at import time.
opener = vfs = vfsmod.vfs
filteropener = filtervfs = vfsmod.filtervfs
abstractvfs = vfsmod.abstractvfs
readonlyvfs = vfsmod.readonlyvfs
auditvfs = vfsmod.auditvfs
checkambigatclosing = vfsmod.checkambigatclosing
345 345
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat; return False if already seen
            # (symlink cycle protection)
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # platform cannot compare stats, so following symlinks is unsafe
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the symlink manually, sharing seen_dirs
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
393 393
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default configuration snippets ship in <datapath>/default.d
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        for f, kind in osutil.listdir(defaultpath):
            if f.endswith('.rc'):
                path.append(os.path.join(defaultpath, f))
    # then system-wide, then per-user configuration
    path.extend(systemrcpath())
    path.extend(userrcpath())
    path = [os.path.normpath(f) for f in path]
    return path
406 406
# memoized result of rcpath(); computed once per process
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in encoding.environ:
            _rcpath = []
            for p in encoding.environ['HGRCPATH'].split(pycompat.ospathsep):
                if not p:
                    continue
                p = util.expandpath(p)
                if os.path.isdir(p):
                    # a directory entry contributes all its *.rc files
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = osrcpath()
    return _rcpath
432 432
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None stands for the working directory; map it to the wdir pseudo-rev
    if rev is None:
        return wdirrev
    return rev
439 439
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx; fall back to default when
    the spec is empty (but keep a literal 0)."""
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise error.Abort(_('empty revision set'))
    # the last revision of the set is the conventional single result
    return repo[l.last()]
448 448
def _pairspec(revspec):
    # True when the top level of the parsed revset is a range expression,
    # meaning the caller must always produce a pair of revisions
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
452 452
def revpair(repo, revs):
    """Resolve user-supplied revision specs to a (node, node-or-None) pair."""
    if not revs:
        # no spec: working directory parent vs. working directory
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick cheap endpoints when the set's ordering is known
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
482 482
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        # bare integers are revision numbers, not revset syntax
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True)
510 510
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # merge: both parents matter
        return parents
    if repo.ui.debugflag:
        # debug mode always shows the (possibly null) second parent too
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx.rev()) - 1:
        # linear history: the parent is implied, omit it
        return []
    return parents
526 526
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # no explicit kind prefix: try shell-style glob expansion
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid glob: fall back to the literal pattern
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret
545 545
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # windows shells don't expand globs for us
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # the matcher degenerated to match-all, so no pattern was used
        pats = []
    return m, pats
570 570
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # convenience wrapper: drop the patterns, keep only the matcher
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
575 575
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
579 579
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
583 583
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath', None)
    if origbackuppath is None:
        # default: keep the backup next to the file itself
        return filepath + ".orig"

    # mirror the repo-relative layout under the configured backup root
    filepathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)

    origbackupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(origbackupdir):
        ui.note(_('creating directory: %s\n') % origbackupdir)
        util.makedirs(origbackupdir)

    return fullorigpath + ".orig"
603 603
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Add new files and forget missing ones, detecting renames by
    similarity. Returns 1 if any file was rejected, else 0."""
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into matching subrepositories first
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []

    def badfn(f, msg):
        # only report files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
659 659
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # unlike addremove(), this always records the changes (no dry-run)
    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
688 688
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # classify by dirstate status char ('?' unknown, 'r' removed,
        # 'a' added) and whether the file exists on disk (st truthy)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
717 717
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # stay quiet about files the user named explicitly unless verbose
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames
732 732
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under the working-copy lock
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
742 742
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, so there is no commit to copy from
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
761 761
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            # an empty or non-alphanumeric-leading entry means the file
            # itself is damaged, not merely unsupported
            if not r or not r[0].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
780 780
def writerequires(opener, requirements):
    """Write the requirements set to .hg/requires, one sorted entry per
    line."""
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)
785 785
class filecachesubentry(object):
    """Tracks the stat state of one file path for filecache invalidation."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # None means "don't know yet" whether stat info is reliable here
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # a missing file is represented as None rather than an error
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
840 840
class filecacheentry(object):
    """A filecache entry spanning several paths; changed when any path
    changes."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
857 857
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths under .hg; joined via self.join() at access time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name,
        # then return self so the descriptor replaces the function
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # invariant: self.name in obj.__dict__ implies a matching
        # entry in obj._filecache (see __set__)
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            # stat=False: the assigned value did not come from the file, so
            # its stat info is unknown until the next refresh
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the per-instance cached value; _filecache keeps its
        # entry so the next __get__ re-validates against the file
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
936 936
937 937 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
938 938 if lock is None:
939 939 raise error.LockInheritanceContractViolation(
940 940 'lock can only be inherited while held')
941 941 if environ is None:
942 942 environ = {}
943 943 with lock.inherit() as locker:
944 944 environ[envvar] = locker
945 945 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
946 946
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
955 955
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta', False):
        return True
    return ui.configbool('format', 'usegeneraldelta', True)
962 962
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta', False)
    return enabled
968
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain newline characters."""

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but is currently
        # unused; key validation happens in write()
        self.vfs = vfs
        self.path = path

    def read(self):
        """Parse the file into a dict.

        Blank lines are skipped; a line without '=' raises
        error.CorruptedState."""
        lines = self.vfs.readlines(self.path)
        try:
            # rstrip (rather than slicing off the last character) keeps the
            # value intact even when the final line lacks a trailing newline
            d = dict(line.rstrip('\n').split('=', 1) for line in lines if line)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        Raises error.ProgrammingError on invalid keys or values."""
        lines = []
        for k, v in data.items():
            # the emptiness check also rejects '' keys, which previously
            # escaped as an IndexError instead of a ProgrammingError
            if not k or not k[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1005
General Comments 0
You need to be logged in to leave comments. Login now