dirstate: use absolute_import
Gregory Szorc
r27503:0f459662 default
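For context, absolute_import changes how bare import statements resolve inside a package: without it, Python 2 first looks for a sibling module of the same name, which is why the old "from node import nullid" style worked, and why a local file can silently shadow a standard-library module. Below is a minimal, self-contained sketch of that difference; the package name "mypkg" and its files are purely illustrative and not part of this changeset.

# Demonstrates (on Python 2) why absolute_import matters inside a package.
# Builds a throwaway package containing a module named "errno" and shows
# that, without the future import, "import errno" from a sibling module
# picks up the local file instead of the standard library.
from __future__ import print_function

import os
import shutil
import sys
import tempfile

pkgdir = tempfile.mkdtemp()
pkg = os.path.join(pkgdir, 'mypkg')
os.mkdir(pkg)
open(os.path.join(pkg, '__init__.py'), 'w').close()
with open(os.path.join(pkg, 'errno.py'), 'w') as f:
    f.write('shadowed = True\n')
with open(os.path.join(pkg, 'implicit.py'), 'w') as f:
    f.write('import errno\n')
with open(os.path.join(pkg, 'explicit.py'), 'w') as f:
    f.write('from __future__ import absolute_import\nimport errno\n')

sys.path.insert(0, pkgdir)
try:
    from mypkg import explicit, implicit
    # On Python 2: implicit.errno is mypkg/errno.py, explicit.errno is stdlib.
    # On Python 3 both resolve to the stdlib; absolute_import is the default.
    print('implicit shadowed by local errno.py:', hasattr(implicit.errno, 'shadowed'))
    print('explicit resolves to stdlib errno:', not hasattr(explicit.errno, 'shadowed'))
finally:
    shutil.rmtree(pkgdir)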
@@ -1,1180 +1,1193
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 from node import nullid
9 from i18n import _
10 import scmutil, util, osutil, parsers, encoding, pathutil, error
11 import os, stat, errno
12 import match as matchmod
8 from __future__ import absolute_import
9
10 import errno
11 import os
12 import stat
13
14 from .i18n import _
15 from .node import nullid
16 from . import (
17 encoding,
18 error,
19 match as matchmod,
20 osutil,
21 parsers,
22 pathutil,
23 scmutil,
24 util,
25 )
13 26
14 27 propertycache = util.propertycache
15 28 filecache = scmutil.filecache
16 29 _rangemask = 0x7fffffff
17 30
18 31 dirstatetuple = parsers.dirstatetuple
19 32
20 33 class repocache(filecache):
21 34 """filecache for files in .hg/"""
22 35 def join(self, obj, fname):
23 36 return obj._opener.join(fname)
24 37
25 38 class rootcache(filecache):
26 39 """filecache for files in the repository root"""
27 40 def join(self, obj, fname):
28 41 return obj._join(fname)
29 42
30 43 def _getfsnow(vfs):
31 44 '''Get "now" timestamp on filesystem'''
32 45 tmpfd, tmpname = vfs.mkstemp()
33 46 try:
34 47 return os.fstat(tmpfd).st_mtime
35 48 finally:
36 49 os.close(tmpfd)
37 50 vfs.unlink(tmpname)
38 51
39 52 def _trypending(root, vfs, filename):
40 53 '''Open file to be read according to HG_PENDING environment variable
41 54
42 55 This opens '.pending' of specified 'filename' only when HG_PENDING
43 56 is equal to 'root'.
44 57
45 58 This returns '(fp, is_pending_opened)' tuple.
46 59 '''
47 60 if root == os.environ.get('HG_PENDING'):
48 61 try:
49 62 return (vfs('%s.pending' % filename), True)
50 63 except IOError as inst:
51 64 if inst.errno != errno.ENOENT:
52 65 raise
53 66 return (vfs(filename), False)
54 67
55 68 class dirstate(object):
56 69
57 70 def __init__(self, opener, ui, root, validate):
58 71 '''Create a new dirstate object.
59 72
60 73 opener is an open()-like callable that can be used to open the
61 74 dirstate file; root is the root of the directory tracked by
62 75 the dirstate.
63 76 '''
64 77 self._opener = opener
65 78 self._validate = validate
66 79 self._root = root
67 80 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
68 81 # UNC path pointing to root share (issue4557)
69 82 self._rootdir = pathutil.normasprefix(root)
70 83 # internal config: ui.forcecwd
71 84 forcecwd = ui.config('ui', 'forcecwd')
72 85 if forcecwd:
73 86 self._cwd = forcecwd
74 87 self._dirty = False
75 88 self._dirtypl = False
76 89 self._lastnormaltime = 0
77 90 self._ui = ui
78 91 self._filecache = {}
79 92 self._parentwriters = 0
80 93 self._filename = 'dirstate'
81 94 self._pendingfilename = '%s.pending' % self._filename
82 95
83 96 # for consistent view between _pl() and _read() invocations
84 97 self._pendingmode = None
85 98
86 99 def beginparentchange(self):
87 100 '''Marks the beginning of a set of changes that involve changing
88 101 the dirstate parents. If there is an exception during this time,
89 102 the dirstate will not be written when the wlock is released. This
90 103 prevents writing an incoherent dirstate where the parent doesn't
91 104 match the contents.
92 105 '''
93 106 self._parentwriters += 1
94 107
95 108 def endparentchange(self):
96 109 '''Marks the end of a set of changes that involve changing the
97 110 dirstate parents. Once all parent changes have been marked done,
98 111 the wlock will be free to write the dirstate on release.
99 112 '''
100 113 if self._parentwriters > 0:
101 114 self._parentwriters -= 1
102 115
103 116 def pendingparentchange(self):
104 117 '''Returns true if the dirstate is in the middle of a set of changes
105 118 that modify the dirstate parent.
106 119 '''
107 120 return self._parentwriters > 0
108 121
109 122 @propertycache
110 123 def _map(self):
111 124 '''Return the dirstate contents as a map from filename to
112 125 (state, mode, size, time).'''
113 126 self._read()
114 127 return self._map
115 128
116 129 @propertycache
117 130 def _copymap(self):
118 131 self._read()
119 132 return self._copymap
120 133
121 134 @propertycache
122 135 def _filefoldmap(self):
123 136 try:
124 137 makefilefoldmap = parsers.make_file_foldmap
125 138 except AttributeError:
126 139 pass
127 140 else:
128 141 return makefilefoldmap(self._map, util.normcasespec,
129 142 util.normcasefallback)
130 143
131 144 f = {}
132 145 normcase = util.normcase
133 146 for name, s in self._map.iteritems():
134 147 if s[0] != 'r':
135 148 f[normcase(name)] = name
136 149 f['.'] = '.' # prevents useless util.fspath() invocation
137 150 return f
138 151
139 152 @propertycache
140 153 def _dirfoldmap(self):
141 154 f = {}
142 155 normcase = util.normcase
143 156 for name in self._dirs:
144 157 f[normcase(name)] = name
145 158 return f
146 159
147 160 @repocache('branch')
148 161 def _branch(self):
149 162 try:
150 163 return self._opener.read("branch").strip() or "default"
151 164 except IOError as inst:
152 165 if inst.errno != errno.ENOENT:
153 166 raise
154 167 return "default"
155 168
156 169 @propertycache
157 170 def _pl(self):
158 171 try:
159 172 fp = self._opendirstatefile()
160 173 st = fp.read(40)
161 174 fp.close()
162 175 l = len(st)
163 176 if l == 40:
164 177 return st[:20], st[20:40]
165 178 elif l > 0 and l < 40:
166 179 raise error.Abort(_('working directory state appears damaged!'))
167 180 except IOError as err:
168 181 if err.errno != errno.ENOENT:
169 182 raise
170 183 return [nullid, nullid]
171 184
172 185 @propertycache
173 186 def _dirs(self):
174 187 return util.dirs(self._map, 'r')
175 188
176 189 def dirs(self):
177 190 return self._dirs
178 191
179 192 @rootcache('.hgignore')
180 193 def _ignore(self):
181 194 files = []
182 195 if os.path.exists(self._join('.hgignore')):
183 196 files.append(self._join('.hgignore'))
184 197 for name, path in self._ui.configitems("ui"):
185 198 if name == 'ignore' or name.startswith('ignore.'):
186 199 # we need to use os.path.join here rather than self._join
187 200 # because path is arbitrary and user-specified
188 201 files.append(os.path.join(self._rootdir, util.expandpath(path)))
189 202
190 203 if not files:
191 204 return util.never
192 205
193 206 pats = ['include:%s' % f for f in files]
194 207 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
195 208
196 209 @propertycache
197 210 def _slash(self):
198 211 return self._ui.configbool('ui', 'slash') and os.sep != '/'
199 212
200 213 @propertycache
201 214 def _checklink(self):
202 215 return util.checklink(self._root)
203 216
204 217 @propertycache
205 218 def _checkexec(self):
206 219 return util.checkexec(self._root)
207 220
208 221 @propertycache
209 222 def _checkcase(self):
210 223 return not util.checkcase(self._join('.hg'))
211 224
212 225 def _join(self, f):
213 226 # much faster than os.path.join()
214 227 # it's safe because f is always a relative path
215 228 return self._rootdir + f
216 229
217 230 def flagfunc(self, buildfallback):
218 231 if self._checklink and self._checkexec:
219 232 def f(x):
220 233 try:
221 234 st = os.lstat(self._join(x))
222 235 if util.statislink(st):
223 236 return 'l'
224 237 if util.statisexec(st):
225 238 return 'x'
226 239 except OSError:
227 240 pass
228 241 return ''
229 242 return f
230 243
231 244 fallback = buildfallback()
232 245 if self._checklink:
233 246 def f(x):
234 247 if os.path.islink(self._join(x)):
235 248 return 'l'
236 249 if 'x' in fallback(x):
237 250 return 'x'
238 251 return ''
239 252 return f
240 253 if self._checkexec:
241 254 def f(x):
242 255 if 'l' in fallback(x):
243 256 return 'l'
244 257 if util.isexec(self._join(x)):
245 258 return 'x'
246 259 return ''
247 260 return f
248 261 else:
249 262 return fallback
250 263
251 264 @propertycache
252 265 def _cwd(self):
253 266 return os.getcwd()
254 267
255 268 def getcwd(self):
256 269 '''Return the path from which a canonical path is calculated.
257 270
258 271 This path should be used to resolve file patterns or to convert
259 272 canonical paths back to file paths for display. It shouldn't be
260 273 used to get real file paths. Use vfs functions instead.
261 274 '''
262 275 cwd = self._cwd
263 276 if cwd == self._root:
264 277 return ''
265 278 # self._root ends with a path separator if self._root is '/' or 'C:\'
266 279 rootsep = self._root
267 280 if not util.endswithsep(rootsep):
268 281 rootsep += os.sep
269 282 if cwd.startswith(rootsep):
270 283 return cwd[len(rootsep):]
271 284 else:
272 285 # we're outside the repo. return an absolute path.
273 286 return cwd
274 287
275 288 def pathto(self, f, cwd=None):
276 289 if cwd is None:
277 290 cwd = self.getcwd()
278 291 path = util.pathto(self._root, cwd, f)
279 292 if self._slash:
280 293 return util.pconvert(path)
281 294 return path
282 295
283 296 def __getitem__(self, key):
284 297 '''Return the current state of key (a filename) in the dirstate.
285 298
286 299 States are:
287 300 n normal
288 301 m needs merging
289 302 r marked for removal
290 303 a marked for addition
291 304 ? not tracked
292 305 '''
293 306 return self._map.get(key, ("?",))[0]
294 307
295 308 def __contains__(self, key):
296 309 return key in self._map
297 310
298 311 def __iter__(self):
299 312 for x in sorted(self._map):
300 313 yield x
301 314
302 315 def iteritems(self):
303 316 return self._map.iteritems()
304 317
305 318 def parents(self):
306 319 return [self._validate(p) for p in self._pl]
307 320
308 321 def p1(self):
309 322 return self._validate(self._pl[0])
310 323
311 324 def p2(self):
312 325 return self._validate(self._pl[1])
313 326
314 327 def branch(self):
315 328 return encoding.tolocal(self._branch)
316 329
317 330 def setparents(self, p1, p2=nullid):
318 331 """Set dirstate parents to p1 and p2.
319 332
320 333 When moving from two parents to one, 'm' merged entries are
321 334 adjusted to normal and previous copy records are discarded and
322 335 returned by the call.
323 336
324 337 See localrepo.setparents()
325 338 """
326 339 if self._parentwriters == 0:
327 340 raise ValueError("cannot set dirstate parent without "
328 341 "calling dirstate.beginparentchange")
329 342
330 343 self._dirty = self._dirtypl = True
331 344 oldp2 = self._pl[1]
332 345 self._pl = p1, p2
333 346 copies = {}
334 347 if oldp2 != nullid and p2 == nullid:
335 348 for f, s in self._map.iteritems():
336 349 # Discard 'm' markers when moving away from a merge state
337 350 if s[0] == 'm':
338 351 if f in self._copymap:
339 352 copies[f] = self._copymap[f]
340 353 self.normallookup(f)
341 354 # Also fix up otherparent markers
342 355 elif s[0] == 'n' and s[2] == -2:
343 356 if f in self._copymap:
344 357 copies[f] = self._copymap[f]
345 358 self.add(f)
346 359 return copies
347 360
348 361 def setbranch(self, branch):
349 362 self._branch = encoding.fromlocal(branch)
350 363 f = self._opener('branch', 'w', atomictemp=True)
351 364 try:
352 365 f.write(self._branch + '\n')
353 366 f.close()
354 367
355 368 # make sure filecache has the correct stat info for _branch after
356 369 # replacing the underlying file
357 370 ce = self._filecache['_branch']
358 371 if ce:
359 372 ce.refresh()
360 373 except: # re-raises
361 374 f.discard()
362 375 raise
363 376
364 377 def _opendirstatefile(self):
365 378 fp, mode = _trypending(self._root, self._opener, self._filename)
366 379 if self._pendingmode is not None and self._pendingmode != mode:
367 380 fp.close()
368 381 raise error.Abort(_('working directory state may be '
369 382 'changed parallelly'))
370 383 self._pendingmode = mode
371 384 return fp
372 385
373 386 def _read(self):
374 387 self._map = {}
375 388 self._copymap = {}
376 389 try:
377 390 fp = self._opendirstatefile()
378 391 try:
379 392 st = fp.read()
380 393 finally:
381 394 fp.close()
382 395 except IOError as err:
383 396 if err.errno != errno.ENOENT:
384 397 raise
385 398 return
386 399 if not st:
387 400 return
388 401
389 402 if util.safehasattr(parsers, 'dict_new_presized'):
390 403 # Make an estimate of the number of files in the dirstate based on
391 404 # its size. From a linear regression on a set of real-world repos,
392 405 # all over 10,000 files, the size of a dirstate entry is 85
393 406 # bytes. The cost of resizing is significantly higher than the cost
394 407 # of filling in a larger presized dict, so subtract 20% from the
395 408 # size.
396 409 #
397 410 # This heuristic is imperfect in many ways, so in a future dirstate
398 411 # format update it makes sense to just record the number of entries
399 412 # on write.
400 413 self._map = parsers.dict_new_presized(len(st) / 71)
401 414
402 415 # Python's garbage collector triggers a GC each time a certain number
403 416 # of container objects (the number being defined by
404 417 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
405 418 # for each file in the dirstate. The C version then immediately marks
406 419 # them as not to be tracked by the collector. However, this has no
407 420 # effect on when GCs are triggered, only on what objects the GC looks
408 421 # into. This means that O(number of files) GCs are unavoidable.
409 422 # Depending on when in the process's lifetime the dirstate is parsed,
410 423 # this can get very expensive. As a workaround, disable GC while
411 424 # parsing the dirstate.
412 425 #
413 426 # (we cannot decorate the function directly since it is in a C module)
414 427 parse_dirstate = util.nogc(parsers.parse_dirstate)
415 428 p = parse_dirstate(self._map, self._copymap, st)
416 429 if not self._dirtypl:
417 430 self._pl = p
418 431
419 432 def invalidate(self):
420 433 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
421 434 "_pl", "_dirs", "_ignore"):
422 435 if a in self.__dict__:
423 436 delattr(self, a)
424 437 self._lastnormaltime = 0
425 438 self._dirty = False
426 439 self._parentwriters = 0
427 440
428 441 def copy(self, source, dest):
429 442 """Mark dest as a copy of source. Unmark dest if source is None."""
430 443 if source == dest:
431 444 return
432 445 self._dirty = True
433 446 if source is not None:
434 447 self._copymap[dest] = source
435 448 elif dest in self._copymap:
436 449 del self._copymap[dest]
437 450
438 451 def copied(self, file):
439 452 return self._copymap.get(file, None)
440 453
441 454 def copies(self):
442 455 return self._copymap
443 456
444 457 def _droppath(self, f):
445 458 if self[f] not in "?r" and "_dirs" in self.__dict__:
446 459 self._dirs.delpath(f)
447 460
448 461 if "_filefoldmap" in self.__dict__:
449 462 normed = util.normcase(f)
450 463 if normed in self._filefoldmap:
451 464 del self._filefoldmap[normed]
452 465
453 466 def _addpath(self, f, state, mode, size, mtime):
454 467 oldstate = self[f]
455 468 if state == 'a' or oldstate == 'r':
456 469 scmutil.checkfilename(f)
457 470 if f in self._dirs:
458 471 raise error.Abort(_('directory %r already in dirstate') % f)
459 472 # shadows
460 473 for d in util.finddirs(f):
461 474 if d in self._dirs:
462 475 break
463 476 if d in self._map and self[d] != 'r':
464 477 raise error.Abort(
465 478 _('file %r in dirstate clashes with %r') % (d, f))
466 479 if oldstate in "?r" and "_dirs" in self.__dict__:
467 480 self._dirs.addpath(f)
468 481 self._dirty = True
469 482 self._map[f] = dirstatetuple(state, mode, size, mtime)
470 483
471 484 def normal(self, f):
472 485 '''Mark a file normal and clean.'''
473 486 s = os.lstat(self._join(f))
474 487 mtime = s.st_mtime
475 488 self._addpath(f, 'n', s.st_mode,
476 489 s.st_size & _rangemask, mtime & _rangemask)
477 490 if f in self._copymap:
478 491 del self._copymap[f]
479 492 if mtime > self._lastnormaltime:
480 493 # Remember the most recent modification timeslot for status(),
481 494 # to make sure we won't miss future size-preserving file content
482 495 # modifications that happen within the same timeslot.
483 496 self._lastnormaltime = mtime
484 497
485 498 def normallookup(self, f):
486 499 '''Mark a file normal, but possibly dirty.'''
487 500 if self._pl[1] != nullid and f in self._map:
488 501 # if there is a merge going on and the file was either
489 502 # in state 'm' (-1) or coming from other parent (-2) before
490 503 # being removed, restore that state.
491 504 entry = self._map[f]
492 505 if entry[0] == 'r' and entry[2] in (-1, -2):
493 506 source = self._copymap.get(f)
494 507 if entry[2] == -1:
495 508 self.merge(f)
496 509 elif entry[2] == -2:
497 510 self.otherparent(f)
498 511 if source:
499 512 self.copy(source, f)
500 513 return
501 514 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
502 515 return
503 516 self._addpath(f, 'n', 0, -1, -1)
504 517 if f in self._copymap:
505 518 del self._copymap[f]
506 519
507 520 def otherparent(self, f):
508 521 '''Mark as coming from the other parent, always dirty.'''
509 522 if self._pl[1] == nullid:
510 523 raise error.Abort(_("setting %r to other parent "
511 524 "only allowed in merges") % f)
512 525 if f in self and self[f] == 'n':
513 526 # merge-like
514 527 self._addpath(f, 'm', 0, -2, -1)
515 528 else:
516 529 # add-like
517 530 self._addpath(f, 'n', 0, -2, -1)
518 531
519 532 if f in self._copymap:
520 533 del self._copymap[f]
521 534
522 535 def add(self, f):
523 536 '''Mark a file added.'''
524 537 self._addpath(f, 'a', 0, -1, -1)
525 538 if f in self._copymap:
526 539 del self._copymap[f]
527 540
528 541 def remove(self, f):
529 542 '''Mark a file removed.'''
530 543 self._dirty = True
531 544 self._droppath(f)
532 545 size = 0
533 546 if self._pl[1] != nullid and f in self._map:
534 547 # backup the previous state
535 548 entry = self._map[f]
536 549 if entry[0] == 'm': # merge
537 550 size = -1
538 551 elif entry[0] == 'n' and entry[2] == -2: # other parent
539 552 size = -2
540 553 self._map[f] = dirstatetuple('r', 0, size, 0)
541 554 if size == 0 and f in self._copymap:
542 555 del self._copymap[f]
543 556
544 557 def merge(self, f):
545 558 '''Mark a file merged.'''
546 559 if self._pl[1] == nullid:
547 560 return self.normallookup(f)
548 561 return self.otherparent(f)
549 562
550 563 def drop(self, f):
551 564 '''Drop a file from the dirstate'''
552 565 if f in self._map:
553 566 self._dirty = True
554 567 self._droppath(f)
555 568 del self._map[f]
556 569
557 570 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
558 571 if exists is None:
559 572 exists = os.path.lexists(os.path.join(self._root, path))
560 573 if not exists:
561 574 # Maybe a path component exists
562 575 if not ignoremissing and '/' in path:
563 576 d, f = path.rsplit('/', 1)
564 577 d = self._normalize(d, False, ignoremissing, None)
565 578 folded = d + "/" + f
566 579 else:
567 580 # No path components, preserve original case
568 581 folded = path
569 582 else:
570 583 # recursively normalize leading directory components
571 584 # against dirstate
572 585 if '/' in normed:
573 586 d, f = normed.rsplit('/', 1)
574 587 d = self._normalize(d, False, ignoremissing, True)
575 588 r = self._root + "/" + d
576 589 folded = d + "/" + util.fspath(f, r)
577 590 else:
578 591 folded = util.fspath(normed, self._root)
579 592 storemap[normed] = folded
580 593
581 594 return folded
582 595
583 596 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
584 597 normed = util.normcase(path)
585 598 folded = self._filefoldmap.get(normed, None)
586 599 if folded is None:
587 600 if isknown:
588 601 folded = path
589 602 else:
590 603 folded = self._discoverpath(path, normed, ignoremissing, exists,
591 604 self._filefoldmap)
592 605 return folded
593 606
594 607 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
595 608 normed = util.normcase(path)
596 609 folded = self._filefoldmap.get(normed, None)
597 610 if folded is None:
598 611 folded = self._dirfoldmap.get(normed, None)
599 612 if folded is None:
600 613 if isknown:
601 614 folded = path
602 615 else:
603 616 # store discovered result in dirfoldmap so that future
604 617 # normalizefile calls don't start matching directories
605 618 folded = self._discoverpath(path, normed, ignoremissing, exists,
606 619 self._dirfoldmap)
607 620 return folded
608 621
609 622 def normalize(self, path, isknown=False, ignoremissing=False):
610 623 '''
611 624 normalize the case of a pathname when on a casefolding filesystem
612 625
613 626 isknown specifies whether the filename came from walking the
614 627 disk, to avoid extra filesystem access.
615 628
616 629 If ignoremissing is True, missing paths are returned
617 630 unchanged. Otherwise, we try harder to normalize possibly
618 631 existing path components.
619 632
620 633 The normalized case is determined based on the following precedence:
621 634
622 635 - version of name already stored in the dirstate
623 636 - version of name stored on disk
624 637 - version provided via command arguments
625 638 '''
626 639
627 640 if self._checkcase:
628 641 return self._normalize(path, isknown, ignoremissing)
629 642 return path
630 643
631 644 def clear(self):
632 645 self._map = {}
633 646 if "_dirs" in self.__dict__:
634 647 delattr(self, "_dirs")
635 648 self._copymap = {}
636 649 self._pl = [nullid, nullid]
637 650 self._lastnormaltime = 0
638 651 self._dirty = True
639 652
640 653 def rebuild(self, parent, allfiles, changedfiles=None):
641 654 if changedfiles is None:
642 655 # Rebuild entire dirstate
643 656 changedfiles = allfiles
644 657 lastnormaltime = self._lastnormaltime
645 658 self.clear()
646 659 self._lastnormaltime = lastnormaltime
647 660
648 661 for f in changedfiles:
649 662 mode = 0o666
650 663 if f in allfiles and 'x' in allfiles.flags(f):
651 664 mode = 0o777
652 665
653 666 if f in allfiles:
654 667 self._map[f] = dirstatetuple('n', mode, -1, 0)
655 668 else:
656 669 self._map.pop(f, None)
657 670
658 671 self._pl = (parent, nullid)
659 672 self._dirty = True
660 673
661 674 def write(self, tr=False):
662 675 if not self._dirty:
663 676 return
664 677
665 678 filename = self._filename
666 679 if tr is False: # not explicitly specified
667 680 if (self._ui.configbool('devel', 'all-warnings')
668 681 or self._ui.configbool('devel', 'check-dirstate-write')):
669 682 self._ui.develwarn('use dirstate.write with '
670 683 'repo.currenttransaction()')
671 684
672 685 if self._opener.lexists(self._pendingfilename):
673 686 # if pending file already exists, in-memory changes
674 687 # should be written into it, because it has priority
675 688 # to '.hg/dirstate' at reading under HG_PENDING mode
676 689 filename = self._pendingfilename
677 690 elif tr:
678 691 # 'dirstate.write()' is not only for writing in-memory
679 692 # changes out, but also for dropping ambiguous timestamp.
680 693 # delayed writing re-raises the "ambiguous timestamp issue".
681 694 # See also the wiki page below for detail:
682 695 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
683 696
684 697 # emulate dropping timestamp in 'parsers.pack_dirstate'
685 698 now = _getfsnow(self._opener)
686 699 dmap = self._map
687 700 for f, e in dmap.iteritems():
688 701 if e[0] == 'n' and e[3] == now:
689 702 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
690 703
691 704 # emulate that all 'dirstate.normal' results are written out
692 705 self._lastnormaltime = 0
693 706
694 707 # delay writing in-memory changes out
695 708 tr.addfilegenerator('dirstate', (self._filename,),
696 709 self._writedirstate, location='plain')
697 710 return
698 711
699 712 st = self._opener(filename, "w", atomictemp=True)
700 713 self._writedirstate(st)
701 714
702 715 def _writedirstate(self, st):
703 716 # use the modification time of the newly created temporary file as the
704 717 # filesystem's notion of 'now'
705 718 now = util.fstat(st).st_mtime & _rangemask
706 719
707 720 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
708 721 # timestamp of each entry in dirstate, because of 'now > mtime'
709 722 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
710 723 if delaywrite > 0:
711 724 # do we have any files to delay for?
712 725 for f, e in self._map.iteritems():
713 726 if e[0] == 'n' and e[3] == now:
714 727 import time # to avoid useless import
715 728 # rather than sleep n seconds, sleep until the next
716 729 # multiple of n seconds
717 730 clock = time.time()
718 731 start = int(clock) - (int(clock) % delaywrite)
719 732 end = start + delaywrite
720 733 time.sleep(end - clock)
721 734 break
722 735
723 736 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
724 737 st.close()
725 738 self._lastnormaltime = 0
726 739 self._dirty = self._dirtypl = False
727 740
728 741 def _dirignore(self, f):
729 742 if f == '.':
730 743 return False
731 744 if self._ignore(f):
732 745 return True
733 746 for p in util.finddirs(f):
734 747 if self._ignore(p):
735 748 return True
736 749 return False
737 750
738 751 def _walkexplicit(self, match, subrepos):
739 752 '''Get stat data about the files explicitly specified by match.
740 753
741 754 Return a triple (results, dirsfound, dirsnotfound).
742 755 - results is a mapping from filename to stat result. It also contains
743 756 listings mapping subrepos and .hg to None.
744 757 - dirsfound is a list of files found to be directories.
745 758 - dirsnotfound is a list of files that the dirstate thinks are
746 759 directories and that were not found.'''
747 760
748 761 def badtype(mode):
749 762 kind = _('unknown')
750 763 if stat.S_ISCHR(mode):
751 764 kind = _('character device')
752 765 elif stat.S_ISBLK(mode):
753 766 kind = _('block device')
754 767 elif stat.S_ISFIFO(mode):
755 768 kind = _('fifo')
756 769 elif stat.S_ISSOCK(mode):
757 770 kind = _('socket')
758 771 elif stat.S_ISDIR(mode):
759 772 kind = _('directory')
760 773 return _('unsupported file type (type is %s)') % kind
761 774
762 775 matchedir = match.explicitdir
763 776 badfn = match.bad
764 777 dmap = self._map
765 778 lstat = os.lstat
766 779 getkind = stat.S_IFMT
767 780 dirkind = stat.S_IFDIR
768 781 regkind = stat.S_IFREG
769 782 lnkkind = stat.S_IFLNK
770 783 join = self._join
771 784 dirsfound = []
772 785 foundadd = dirsfound.append
773 786 dirsnotfound = []
774 787 notfoundadd = dirsnotfound.append
775 788
776 789 if not match.isexact() and self._checkcase:
777 790 normalize = self._normalize
778 791 else:
779 792 normalize = None
780 793
781 794 files = sorted(match.files())
782 795 subrepos.sort()
783 796 i, j = 0, 0
784 797 while i < len(files) and j < len(subrepos):
785 798 subpath = subrepos[j] + "/"
786 799 if files[i] < subpath:
787 800 i += 1
788 801 continue
789 802 while i < len(files) and files[i].startswith(subpath):
790 803 del files[i]
791 804 j += 1
792 805
793 806 if not files or '.' in files:
794 807 files = ['.']
795 808 results = dict.fromkeys(subrepos)
796 809 results['.hg'] = None
797 810
798 811 alldirs = None
799 812 for ff in files:
800 813 # constructing the foldmap is expensive, so don't do it for the
801 814 # common case where files is ['.']
802 815 if normalize and ff != '.':
803 816 nf = normalize(ff, False, True)
804 817 else:
805 818 nf = ff
806 819 if nf in results:
807 820 continue
808 821
809 822 try:
810 823 st = lstat(join(nf))
811 824 kind = getkind(st.st_mode)
812 825 if kind == dirkind:
813 826 if nf in dmap:
814 827 # file replaced by dir on disk but still in dirstate
815 828 results[nf] = None
816 829 if matchedir:
817 830 matchedir(nf)
818 831 foundadd((nf, ff))
819 832 elif kind == regkind or kind == lnkkind:
820 833 results[nf] = st
821 834 else:
822 835 badfn(ff, badtype(kind))
823 836 if nf in dmap:
824 837 results[nf] = None
825 838 except OSError as inst: # nf not found on disk - it is dirstate only
826 839 if nf in dmap: # does it exactly match a missing file?
827 840 results[nf] = None
828 841 else: # does it match a missing directory?
829 842 if alldirs is None:
830 843 alldirs = util.dirs(dmap)
831 844 if nf in alldirs:
832 845 if matchedir:
833 846 matchedir(nf)
834 847 notfoundadd(nf)
835 848 else:
836 849 badfn(ff, inst.strerror)
837 850
838 851 # Case insensitive filesystems cannot rely on lstat() failing to detect
839 852 # a case-only rename. Prune the stat object for any file that does not
840 853 # match the case in the filesystem, if there are multiple files that
841 854 # normalize to the same path.
842 855 if match.isexact() and self._checkcase:
843 856 normed = {}
844 857
845 858 for f, st in results.iteritems():
846 859 if st is None:
847 860 continue
848 861
849 862 nc = util.normcase(f)
850 863 paths = normed.get(nc)
851 864
852 865 if paths is None:
853 866 paths = set()
854 867 normed[nc] = paths
855 868
856 869 paths.add(f)
857 870
858 871 for norm, paths in normed.iteritems():
859 872 if len(paths) > 1:
860 873 for path in paths:
861 874 folded = self._discoverpath(path, norm, True, None,
862 875 self._dirfoldmap)
863 876 if path != folded:
864 877 results[path] = None
865 878
866 879 return results, dirsfound, dirsnotfound
867 880
868 881 def walk(self, match, subrepos, unknown, ignored, full=True):
869 882 '''
870 883 Walk recursively through the directory tree, finding all files
871 884 matched by match.
872 885
873 886 If full is False, maybe skip some known-clean files.
874 887
875 888 Return a dict mapping filename to stat-like object (either
876 889 mercurial.osutil.stat instance or return value of os.stat()).
877 890
878 891 '''
879 892 # full is a flag that extensions that hook into walk can use -- this
880 893 # implementation doesn't use it at all. This satisfies the contract
881 894 # because we only guarantee a "maybe".
882 895
883 896 if ignored:
884 897 ignore = util.never
885 898 dirignore = util.never
886 899 elif unknown:
887 900 ignore = self._ignore
888 901 dirignore = self._dirignore
889 902 else:
890 903 # if not unknown and not ignored, drop dir recursion and step 2
891 904 ignore = util.always
892 905 dirignore = util.always
893 906
894 907 matchfn = match.matchfn
895 908 matchalways = match.always()
896 909 matchtdir = match.traversedir
897 910 dmap = self._map
898 911 listdir = osutil.listdir
899 912 lstat = os.lstat
900 913 dirkind = stat.S_IFDIR
901 914 regkind = stat.S_IFREG
902 915 lnkkind = stat.S_IFLNK
903 916 join = self._join
904 917
905 918 exact = skipstep3 = False
906 919 if match.isexact(): # match.exact
907 920 exact = True
908 921 dirignore = util.always # skip step 2
909 922 elif match.prefix(): # match.match, no patterns
910 923 skipstep3 = True
911 924
912 925 if not exact and self._checkcase:
913 926 normalize = self._normalize
914 927 normalizefile = self._normalizefile
915 928 skipstep3 = False
916 929 else:
917 930 normalize = self._normalize
918 931 normalizefile = None
919 932
920 933 # step 1: find all explicit files
921 934 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
922 935
923 936 skipstep3 = skipstep3 and not (work or dirsnotfound)
924 937 work = [d for d in work if not dirignore(d[0])]
925 938
926 939 # step 2: visit subdirectories
927 940 def traverse(work, alreadynormed):
928 941 wadd = work.append
929 942 while work:
930 943 nd = work.pop()
931 944 skip = None
932 945 if nd == '.':
933 946 nd = ''
934 947 else:
935 948 skip = '.hg'
936 949 try:
937 950 entries = listdir(join(nd), stat=True, skip=skip)
938 951 except OSError as inst:
939 952 if inst.errno in (errno.EACCES, errno.ENOENT):
940 953 match.bad(self.pathto(nd), inst.strerror)
941 954 continue
942 955 raise
943 956 for f, kind, st in entries:
944 957 if normalizefile:
945 958 # even though f might be a directory, we're only
946 959 # interested in comparing it to files currently in the
947 960 # dmap -- therefore normalizefile is enough
948 961 nf = normalizefile(nd and (nd + "/" + f) or f, True,
949 962 True)
950 963 else:
951 964 nf = nd and (nd + "/" + f) or f
952 965 if nf not in results:
953 966 if kind == dirkind:
954 967 if not ignore(nf):
955 968 if matchtdir:
956 969 matchtdir(nf)
957 970 wadd(nf)
958 971 if nf in dmap and (matchalways or matchfn(nf)):
959 972 results[nf] = None
960 973 elif kind == regkind or kind == lnkkind:
961 974 if nf in dmap:
962 975 if matchalways or matchfn(nf):
963 976 results[nf] = st
964 977 elif ((matchalways or matchfn(nf))
965 978 and not ignore(nf)):
966 979 # unknown file -- normalize if necessary
967 980 if not alreadynormed:
968 981 nf = normalize(nf, False, True)
969 982 results[nf] = st
970 983 elif nf in dmap and (matchalways or matchfn(nf)):
971 984 results[nf] = None
972 985
973 986 for nd, d in work:
974 987 # alreadynormed means that processwork doesn't have to do any
975 988 # expensive directory normalization
976 989 alreadynormed = not normalize or nd == d
977 990 traverse([d], alreadynormed)
978 991
979 992 for s in subrepos:
980 993 del results[s]
981 994 del results['.hg']
982 995
983 996 # step 3: visit remaining files from dmap
984 997 if not skipstep3 and not exact:
985 998 # If a dmap file is not in results yet, it was either
986 999 # a) not matching matchfn b) ignored, c) missing, or d) under a
987 1000 # symlink directory.
988 1001 if not results and matchalways:
989 1002 visit = dmap.keys()
990 1003 else:
991 1004 visit = [f for f in dmap if f not in results and matchfn(f)]
992 1005 visit.sort()
993 1006
994 1007 if unknown:
995 1008 # unknown == True means we walked all dirs under the roots
996 1009 # that weren't ignored, and everything that matched was stat'ed
997 1010 # and is already in results.
998 1011 # The rest must thus be ignored or under a symlink.
999 1012 audit_path = pathutil.pathauditor(self._root)
1000 1013
1001 1014 for nf in iter(visit):
1002 1015 # If a stat for the same file was already added with a
1003 1016 # different case, don't add one for this, since that would
1004 1017 # make it appear as if the file exists under both names
1005 1018 # on disk.
1006 1019 if (normalizefile and
1007 1020 normalizefile(nf, True, True) in results):
1008 1021 results[nf] = None
1009 1022 # Report ignored items in the dmap as long as they are not
1010 1023 # under a symlink directory.
1011 1024 elif audit_path.check(nf):
1012 1025 try:
1013 1026 results[nf] = lstat(join(nf))
1014 1027 # file was just ignored, no links, and exists
1015 1028 except OSError:
1016 1029 # file doesn't exist
1017 1030 results[nf] = None
1018 1031 else:
1019 1032 # It's either missing or under a symlink directory
1020 1033 # which we in this case report as missing
1021 1034 results[nf] = None
1022 1035 else:
1023 1036 # We may not have walked the full directory tree above,
1024 1037 # so stat and check everything we missed.
1025 1038 nf = iter(visit).next
1026 1039 for st in util.statfiles([join(i) for i in visit]):
1027 1040 results[nf()] = st
1028 1041 return results
1029 1042
1030 1043 def status(self, match, subrepos, ignored, clean, unknown):
1031 1044 '''Determine the status of the working copy relative to the
1032 1045 dirstate and return a pair of (unsure, status), where status is of type
1033 1046 scmutil.status and:
1034 1047
1035 1048 unsure:
1036 1049 files that might have been modified since the dirstate was
1037 1050 written, but need to be read to be sure (size is the same
1038 1051 but mtime differs)
1039 1052 status.modified:
1040 1053 files that have definitely been modified since the dirstate
1041 1054 was written (different size or mode)
1042 1055 status.clean:
1043 1056 files that have definitely not been modified since the
1044 1057 dirstate was written
1045 1058 '''
1046 1059 listignored, listclean, listunknown = ignored, clean, unknown
1047 1060 lookup, modified, added, unknown, ignored = [], [], [], [], []
1048 1061 removed, deleted, clean = [], [], []
1049 1062
1050 1063 dmap = self._map
1051 1064 ladd = lookup.append # aka "unsure"
1052 1065 madd = modified.append
1053 1066 aadd = added.append
1054 1067 uadd = unknown.append
1055 1068 iadd = ignored.append
1056 1069 radd = removed.append
1057 1070 dadd = deleted.append
1058 1071 cadd = clean.append
1059 1072 mexact = match.exact
1060 1073 dirignore = self._dirignore
1061 1074 checkexec = self._checkexec
1062 1075 copymap = self._copymap
1063 1076 lastnormaltime = self._lastnormaltime
1064 1077
1065 1078 # We need to do full walks when either
1066 1079 # - we're listing all clean files, or
1067 1080 # - match.traversedir does something, because match.traversedir should
1068 1081 # be called for every dir in the working dir
1069 1082 full = listclean or match.traversedir is not None
1070 1083 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1071 1084 full=full).iteritems():
1072 1085 if fn not in dmap:
1073 1086 if (listignored or mexact(fn)) and dirignore(fn):
1074 1087 if listignored:
1075 1088 iadd(fn)
1076 1089 else:
1077 1090 uadd(fn)
1078 1091 continue
1079 1092
1080 1093 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1081 1094 # written like that for performance reasons. dmap[fn] is not a
1082 1095 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1083 1096 # opcode has fast paths when the value to be unpacked is a tuple or
1084 1097 # a list, but falls back to creating a full-fledged iterator in
1085 1098 # general. That is much slower than simply accessing and storing the
1086 1099 # tuple members one by one.
1087 1100 t = dmap[fn]
1088 1101 state = t[0]
1089 1102 mode = t[1]
1090 1103 size = t[2]
1091 1104 time = t[3]
1092 1105
1093 1106 if not st and state in "nma":
1094 1107 dadd(fn)
1095 1108 elif state == 'n':
1096 1109 if (size >= 0 and
1097 1110 ((size != st.st_size and size != st.st_size & _rangemask)
1098 1111 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1099 1112 or size == -2 # other parent
1100 1113 or fn in copymap):
1101 1114 madd(fn)
1102 1115 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1103 1116 ladd(fn)
1104 1117 elif st.st_mtime == lastnormaltime:
1105 1118 # fn may have just been marked as normal and it may have
1106 1119 # changed in the same second without changing its size.
1107 1120 # This can happen if we quickly do multiple commits.
1108 1121 # Force lookup, so we don't miss such a racy file change.
1109 1122 ladd(fn)
1110 1123 elif listclean:
1111 1124 cadd(fn)
1112 1125 elif state == 'm':
1113 1126 madd(fn)
1114 1127 elif state == 'a':
1115 1128 aadd(fn)
1116 1129 elif state == 'r':
1117 1130 radd(fn)
1118 1131
1119 1132 return (lookup, scmutil.status(modified, added, removed, deleted,
1120 1133 unknown, ignored, clean))
1121 1134
1122 1135 def matches(self, match):
1123 1136 '''
1124 1137 return files in the dirstate (in whatever state) filtered by match
1125 1138 '''
1126 1139 dmap = self._map
1127 1140 if match.always():
1128 1141 return dmap.keys()
1129 1142 files = match.files()
1130 1143 if match.isexact():
1131 1144 # fast path -- filter the other way around, since typically files is
1132 1145 # much smaller than dmap
1133 1146 return [f for f in files if f in dmap]
1134 1147 if match.prefix() and all(fn in dmap for fn in files):
1135 1148 # fast path -- all the values are known to be files, so just return
1136 1149 # that
1137 1150 return list(files)
1138 1151 return [f for f in dmap if match(f)]
1139 1152
1140 1153 def _actualfilename(self, tr):
1141 1154 if tr:
1142 1155 return self._pendingfilename
1143 1156 else:
1144 1157 return self._filename
1145 1158
1146 1159 def _savebackup(self, tr, suffix):
1147 1160 '''Save current dirstate into backup file with suffix'''
1148 1161 filename = self._actualfilename(tr)
1149 1162
1150 1163 # use '_writedirstate' instead of 'write' to write changes certainly,
1151 1164 # because the latter omits writing out if transaction is running.
1152 1165 # output file will be used to create backup of dirstate at this point.
1153 1166 self._writedirstate(self._opener(filename, "w", atomictemp=True))
1154 1167
1155 1168 if tr:
1156 1169 # ensure that subsequent tr.writepending returns True for
1157 1170 # changes written out above, even if dirstate is never
1158 1171 # changed after this
1159 1172 tr.addfilegenerator('dirstate', (self._filename,),
1160 1173 self._writedirstate, location='plain')
1161 1174
1162 1175 # ensure that pending file written above is unlinked at
1163 1176 # failure, even if tr.writepending isn't invoked until the
1164 1177 # end of this transaction
1165 1178 tr.registertmp(filename, location='plain')
1166 1179
1167 1180 self._opener.write(filename + suffix, self._opener.tryread(filename))
1168 1181
1169 1182 def _restorebackup(self, tr, suffix):
1170 1183 '''Restore dirstate by backup file with suffix'''
1171 1184 # this "invalidate()" prevents "wlock.release()" from writing
1172 1185 # changes of dirstate out after restoring from backup file
1173 1186 self.invalidate()
1174 1187 filename = self._actualfilename(tr)
1175 1188 self._opener.rename(filename + suffix, filename)
1176 1189
1177 1190 def _clearbackup(self, tr, suffix):
1178 1191 '''Clear backup file with suffix'''
1179 1192 filename = self._actualfilename(tr)
1180 1193 self._opener.unlink(filename + suffix)
@@ -1,197 +1,196
1 1 #require test-repo
2 2
3 3 $ cd "$TESTDIR"/..
4 4
5 5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 6 contrib/casesmash.py not using absolute_import
7 7 contrib/check-code.py not using absolute_import
8 8 contrib/check-code.py requires print_function
9 9 contrib/check-config.py not using absolute_import
10 10 contrib/check-config.py requires print_function
11 11 contrib/debugcmdserver.py not using absolute_import
12 12 contrib/debugcmdserver.py requires print_function
13 13 contrib/debugshell.py not using absolute_import
14 14 contrib/fixpax.py not using absolute_import
15 15 contrib/fixpax.py requires print_function
16 16 contrib/hgclient.py not using absolute_import
17 17 contrib/hgclient.py requires print_function
18 18 contrib/hgfixes/fix_bytes.py not using absolute_import
19 19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
20 20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
21 21 contrib/import-checker.py not using absolute_import
22 22 contrib/import-checker.py requires print_function
23 23 contrib/memory.py not using absolute_import
24 24 contrib/perf.py not using absolute_import
25 25 contrib/python-hook-examples.py not using absolute_import
26 26 contrib/revsetbenchmarks.py not using absolute_import
27 27 contrib/revsetbenchmarks.py requires print_function
28 28 contrib/showstack.py not using absolute_import
29 29 contrib/synthrepo.py not using absolute_import
30 30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
31 31 doc/check-seclevel.py not using absolute_import
32 32 doc/gendoc.py not using absolute_import
33 33 doc/hgmanpage.py not using absolute_import
34 34 hgext/__init__.py not using absolute_import
35 35 hgext/acl.py not using absolute_import
36 36 hgext/blackbox.py not using absolute_import
37 37 hgext/bugzilla.py not using absolute_import
38 38 hgext/censor.py not using absolute_import
39 39 hgext/children.py not using absolute_import
40 40 hgext/churn.py not using absolute_import
41 41 hgext/clonebundles.py not using absolute_import
42 42 hgext/color.py not using absolute_import
43 43 hgext/convert/__init__.py not using absolute_import
44 44 hgext/convert/bzr.py not using absolute_import
45 45 hgext/convert/common.py not using absolute_import
46 46 hgext/convert/convcmd.py not using absolute_import
47 47 hgext/convert/cvs.py not using absolute_import
48 48 hgext/convert/cvsps.py not using absolute_import
49 49 hgext/convert/darcs.py not using absolute_import
50 50 hgext/convert/filemap.py not using absolute_import
51 51 hgext/convert/git.py not using absolute_import
52 52 hgext/convert/gnuarch.py not using absolute_import
53 53 hgext/convert/hg.py not using absolute_import
54 54 hgext/convert/monotone.py not using absolute_import
55 55 hgext/convert/p4.py not using absolute_import
56 56 hgext/convert/subversion.py not using absolute_import
57 57 hgext/convert/transport.py not using absolute_import
58 58 hgext/eol.py not using absolute_import
59 59 hgext/extdiff.py not using absolute_import
60 60 hgext/factotum.py not using absolute_import
61 61 hgext/fetch.py not using absolute_import
62 62 hgext/gpg.py not using absolute_import
63 63 hgext/graphlog.py not using absolute_import
64 64 hgext/hgcia.py not using absolute_import
65 65 hgext/hgk.py not using absolute_import
66 66 hgext/highlight/__init__.py not using absolute_import
67 67 hgext/highlight/highlight.py not using absolute_import
68 68 hgext/histedit.py not using absolute_import
69 69 hgext/keyword.py not using absolute_import
70 70 hgext/largefiles/__init__.py not using absolute_import
71 71 hgext/largefiles/basestore.py not using absolute_import
72 72 hgext/largefiles/lfcommands.py not using absolute_import
73 73 hgext/largefiles/lfutil.py not using absolute_import
74 74 hgext/largefiles/localstore.py not using absolute_import
75 75 hgext/largefiles/overrides.py not using absolute_import
76 76 hgext/largefiles/proto.py not using absolute_import
77 77 hgext/largefiles/remotestore.py not using absolute_import
78 78 hgext/largefiles/reposetup.py not using absolute_import
79 79 hgext/largefiles/uisetup.py not using absolute_import
80 80 hgext/largefiles/wirestore.py not using absolute_import
81 81 hgext/mq.py not using absolute_import
82 82 hgext/notify.py not using absolute_import
83 83 hgext/pager.py not using absolute_import
84 84 hgext/patchbomb.py not using absolute_import
85 85 hgext/purge.py not using absolute_import
86 86 hgext/rebase.py not using absolute_import
87 87 hgext/record.py not using absolute_import
88 88 hgext/relink.py not using absolute_import
89 89 hgext/schemes.py not using absolute_import
90 90 hgext/share.py not using absolute_import
91 91 hgext/shelve.py not using absolute_import
92 92 hgext/strip.py not using absolute_import
93 93 hgext/transplant.py not using absolute_import
94 94 hgext/win32mbcs.py not using absolute_import
95 95 hgext/win32text.py not using absolute_import
96 96 hgext/zeroconf/Zeroconf.py not using absolute_import
97 97 hgext/zeroconf/Zeroconf.py requires print_function
98 98 hgext/zeroconf/__init__.py not using absolute_import
99 99 i18n/check-translation.py not using absolute_import
100 100 i18n/polib.py not using absolute_import
101 101 mercurial/byterange.py not using absolute_import
102 102 mercurial/cmdutil.py not using absolute_import
103 103 mercurial/commands.py not using absolute_import
104 104 mercurial/context.py not using absolute_import
105 mercurial/dirstate.py not using absolute_import
106 105 mercurial/dispatch.py requires print_function
107 106 mercurial/exchange.py not using absolute_import
108 107 mercurial/httpclient/__init__.py not using absolute_import
109 108 mercurial/httpclient/_readers.py not using absolute_import
110 109 mercurial/httpclient/socketutil.py not using absolute_import
111 110 mercurial/httpconnection.py not using absolute_import
112 111 mercurial/keepalive.py not using absolute_import
113 112 mercurial/keepalive.py requires print_function
114 113 mercurial/localrepo.py not using absolute_import
115 114 mercurial/lsprof.py requires print_function
116 115 mercurial/lsprofcalltree.py not using absolute_import
117 116 mercurial/lsprofcalltree.py requires print_function
118 117 mercurial/mail.py requires print_function
119 118 setup.py not using absolute_import
120 119 tests/filterpyflakes.py requires print_function
121 120 tests/generate-working-copy-states.py requires print_function
122 121 tests/get-with-headers.py requires print_function
123 122 tests/heredoctest.py requires print_function
124 123 tests/hypothesishelpers.py not using absolute_import
125 124 tests/hypothesishelpers.py requires print_function
126 125 tests/killdaemons.py not using absolute_import
127 126 tests/md5sum.py not using absolute_import
128 127 tests/mockblackbox.py not using absolute_import
129 128 tests/printenv.py not using absolute_import
130 129 tests/readlink.py not using absolute_import
131 130 tests/readlink.py requires print_function
132 131 tests/revlog-formatv0.py not using absolute_import
133 132 tests/run-tests.py not using absolute_import
134 133 tests/seq.py not using absolute_import
135 134 tests/seq.py requires print_function
136 135 tests/silenttestrunner.py not using absolute_import
137 136 tests/silenttestrunner.py requires print_function
138 137 tests/sitecustomize.py not using absolute_import
139 138 tests/svn-safe-append.py not using absolute_import
140 139 tests/svnxml.py not using absolute_import
141 140 tests/test-ancestor.py requires print_function
142 141 tests/test-atomictempfile.py not using absolute_import
143 142 tests/test-batching.py not using absolute_import
144 143 tests/test-batching.py requires print_function
145 144 tests/test-bdiff.py not using absolute_import
146 145 tests/test-bdiff.py requires print_function
147 146 tests/test-context.py not using absolute_import
148 147 tests/test-context.py requires print_function
149 148 tests/test-demandimport.py not using absolute_import
150 149 tests/test-demandimport.py requires print_function
151 150 tests/test-dispatch.py not using absolute_import
152 151 tests/test-dispatch.py requires print_function
153 152 tests/test-doctest.py not using absolute_import
154 153 tests/test-duplicateoptions.py not using absolute_import
155 154 tests/test-duplicateoptions.py requires print_function
156 155 tests/test-filecache.py not using absolute_import
157 156 tests/test-filecache.py requires print_function
158 157 tests/test-filelog.py not using absolute_import
159 158 tests/test-filelog.py requires print_function
160 159 tests/test-hg-parseurl.py not using absolute_import
161 160 tests/test-hg-parseurl.py requires print_function
162 161 tests/test-hgweb-auth.py not using absolute_import
163 162 tests/test-hgweb-auth.py requires print_function
164 163 tests/test-hgwebdir-paths.py not using absolute_import
165 164 tests/test-hybridencode.py not using absolute_import
166 165 tests/test-hybridencode.py requires print_function
167 166 tests/test-lrucachedict.py not using absolute_import
168 167 tests/test-lrucachedict.py requires print_function
169 168 tests/test-manifest.py not using absolute_import
170 169 tests/test-minirst.py not using absolute_import
171 170 tests/test-minirst.py requires print_function
172 171 tests/test-parseindex2.py not using absolute_import
173 172 tests/test-parseindex2.py requires print_function
174 173 tests/test-pathencode.py not using absolute_import
175 174 tests/test-pathencode.py requires print_function
176 175 tests/test-propertycache.py not using absolute_import
177 176 tests/test-propertycache.py requires print_function
178 177 tests/test-revlog-ancestry.py not using absolute_import
179 178 tests/test-revlog-ancestry.py requires print_function
180 179 tests/test-run-tests.py not using absolute_import
181 180 tests/test-simplemerge.py not using absolute_import
182 181 tests/test-status-inprocess.py not using absolute_import
183 182 tests/test-status-inprocess.py requires print_function
184 183 tests/test-symlink-os-yes-fs-no.py not using absolute_import
185 184 tests/test-trusted.py not using absolute_import
186 185 tests/test-trusted.py requires print_function
187 186 tests/test-ui-color.py not using absolute_import
188 187 tests/test-ui-color.py requires print_function
189 188 tests/test-ui-config.py not using absolute_import
190 189 tests/test-ui-config.py requires print_function
191 190 tests/test-ui-verbosity.py not using absolute_import
192 191 tests/test-ui-verbosity.py requires print_function
193 192 tests/test-url.py not using absolute_import
194 193 tests/test-url.py requires print_function
195 194 tests/test-walkrepo.py requires print_function
196 195 tests/test-wireproto.py requires print_function
197 196 tests/tinyproxy.py requires print_function
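The listing above is the output of contrib/check-py3-compat.py; dropping the mercurial/dirstate.py line is the observable effect of this changeset. As a rough illustration only (the real contrib script may detect this differently), a file can be checked for the absolute_import future statement like so:

# Simplified stand-in for the absolute_import check: parse each file and
# report it when no "from __future__ import absolute_import" is found.
from __future__ import absolute_import, print_function

import ast
import sys

def usesabsoluteimport(source):
    """Return True if the module enables absolute_import via __future__."""
    for node in ast.parse(source).body:
        if isinstance(node, ast.ImportFrom) and node.module == '__future__':
            if any(a.name == 'absolute_import' for a in node.names):
                return True
    return False

if __name__ == '__main__':
    for path in sys.argv[1:]:
        with open(path) as fh:
            if not usesabsoluteimport(fh.read()):
                print('%s not using absolute_import' % path)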