##// END OF EJS Templates
scmutil: use util.queue/util.empty for py3 compat
timeless -
r28819:826d457d default
parent child Browse files
Show More
@@ -1,1379 +1,1378
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 import Queue
11 10 import contextlib
12 11 import errno
13 12 import glob
14 13 import os
15 14 import re
16 15 import shutil
17 16 import stat
18 17 import tempfile
19 18 import threading
20 19
21 20 from .i18n import _
22 21 from .node import wdirrev
23 22 from . import (
24 23 encoding,
25 24 error,
26 25 match as matchmod,
27 26 osutil,
28 27 pathutil,
29 28 phases,
30 29 revset,
31 30 similar,
32 31 util,
33 32 )
34 33
35 34 if os.name == 'nt':
36 35 from . import scmwindows as scmplatform
37 36 else:
38 37 from . import scmposix as scmplatform
39 38
40 39 systemrcpath = scmplatform.systemrcpath
41 40 userrcpath = scmplatform.userrcpath
42 41
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
95 94
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs. Subpaths present in both contexts are
    yielded with the ctx1 subrepo; subpaths only in ctx2 are yielded as
    "null" subrepos based on ctx1 (see the note below).
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # sorted() gives callers a deterministic traversal order
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
120 119
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
141 140
def checknewlabel(repo, lbl, kind):
    '''Abort if ``lbl`` is not a valid new label (bookmark/branch/tag) name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        return
    raise error.Abort(_("cannot use an integer as a name"))
155 154
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for forbidden in ('\r', '\n'):
        if forbidden in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
160 159
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
172 171
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of truthy/falsy values.'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lowered = val.lower()
    parsed = util.parsebool(val)
    # Windows can't represent non-portable names at all, so always abort there
    abort = os.name == 'nt' or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore' or parsed is not None
    if not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
185 184
class casecollisionauditor(object):
    '''Detect case-folding collisions between new files and tracked files.

    Calling the instance with a filename warns (or aborts, if ``abort`` was
    set) when the lowercased name collides with an already-tracked file.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower the whole dirstate in one encoding.lower() call by joining
        # the names with NUL, then split back apart.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        collides = (folded in self._loweredfiles
                    and f not in self._dirstate)
        if collides:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
209 208
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    hidden = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not hidden:
        return None
    hasher = util.sha1()
    for rev in hidden:
        hasher.update('%s;' % rev)
    return hasher.digest()
233 232
class abstractvfs(object):
    """Abstract base class; cannot be instantiated

    Subclasses must provide ``__call__`` (open a file relative to the vfs
    root) and ``join`` (map a relative path to an absolute one); everything
    here is implemented in terms of those two.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False, backgroundclose=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # Rebind the instance attribute so subsequent opens go straight to
        # __call__ without this extra frame.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed,
                             backgroundclose=backgroundclose)

    def read(self, path):
        # read full binary content of ``path``
        with self(path, 'rb') as fp:
            return fp.read()

    def readlines(self, path, mode='rb'):
        with self(path, mode=mode) as fp:
            return fp.readlines()

    def write(self, path, data, backgroundclose=False):
        # overwrite ``path`` with ``data``; returns fp.write()'s result
        with self(path, 'wb', backgroundclose=backgroundclose) as fp:
            return fp.write(data)

    def writelines(self, path, data, mode='wb', notindexed=False):
        with self(path, mode=mode, notindexed=notindexed) as fp:
            return fp.writelines(data)

    def append(self, path, data):
        with self(path, 'ab') as fp:
            return fp.write(data)

    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.basename(path)

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def dirname(self, path):
        """return dirname element of a path (as os.path.dirname would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.dirname(path)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def isfileorlink(self, path=None):
        '''return whether path is a regular file or a symlink

        Unlike isfile, this doesn't follow symlinks.'''
        try:
            st = self.lstat(path)
        except OSError:
            return False
        mode = st.st_mode
        return stat.S_ISREG(mode) or stat.S_ISLNK(mode)

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        # Create the temp file under the vfs root but return a path that is
        # relative (same form as ``dir``) so callers can reopen it via self.
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)

    @contextlib.contextmanager
    def backgroundclosing(self, ui, expectedcount=-1):
        """Allow files to be closed asynchronously.

        When this context manager is active, ``backgroundclose`` can be passed
        to ``__call__``/``open`` to result in the file possibly being closed
        asynchronously, on a background thread.
        """
        # This is an arbitrary restriction and could be changed if we ever
        # have a use case.
        vfs = getattr(self, 'vfs', self)
        if getattr(vfs, '_backgroundfilecloser', None):
            raise error.Abort('can only have 1 active background file closer')

        with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
            try:
                vfs._backgroundfilecloser = bfc
                yield bfc
            finally:
                # always clear, even on error, so a later context can start
                vfs._backgroundfilecloser = None
463 462
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        # setting mustaudit installs the path auditor (see the setter below)
        self.mustaudit = audit
        self.createmode = None
        self._trustnlink = None

    @property
    def mustaudit(self):
        return self._audit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # util.always accepts anything, effectively disabling auditing
            self.audit = util.always

    @util.propertycache
    def _cansymlink(self):
        # computed once per instance: does the filesystem support symlinks?
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # computed once per instance: does the filesystem honor exec bits?
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False, backgroundclose=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.

        If ``backgroundclose`` is passed, the file may be closed asynchronously.
        It can only be used if the ``self.backgroundclosing()`` context manager
        is active. This should only be specified if the following criteria hold:

        1. There is a potential for writing thousands of files. Unless you
           are writing thousands of files, the performance benefits of
           asynchronously closing files is not realized.
        2. Files are opened exactly once for the ``backgroundclosing``
           active duration and are therefore free of race conditions between
           closing a file on a background thread and reopening it. (If the
           file were opened multiple times, there could be unflushed data
           because the original file handle hasn't been flushed/closed yet.)
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise error.Abort("%s: %r" % (r, path))
        self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        with util.posixfile(f):
                            nlink = util.nlinks(f)
                            if nlink < 1:
                                nlink = 2 # force mktempcopy (issue1922)
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                    util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break hardlinks (copy-on-write) before modifying
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)

        if backgroundclose:
            if not self._backgroundfilecloser:
                raise error.Abort('backgroundclose can only be used when a '
                                  'backgroundclosing context manager is active')

            fp = delayclosedfile(fp, self._backgroundfilecloser)

        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: fall back to a regular file holding the
            # link target
            self.write(dst, src)

    def join(self, path, *insidef):
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base

# legacy alias kept for callers that predate the "vfs" name
opener = vfs
605 604
class auditvfs(object):
    '''Mixin for wrapper vfs classes; forwards ``mustaudit`` to the
    wrapped vfs.'''

    def __init__(self, vfs):
        self.vfs = vfs

    @property
    def mustaudit(self):
        return self.vfs.mustaudit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self.vfs.mustaudit = onoff
617 616
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        # filter: callable mapping a relative path to the path actually used
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if path:
            # join relative parts first so the filter sees the full path
            return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
        else:
            return self.vfs.join(path)

# legacy alias kept for callers that predate the "vfs" name
filteropener = filtervfs
635 634
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through
        if mode not in ('r', 'rb'):
            raise error.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)

    def join(self, path, *insidef):
        return self.vfs.join(path, *insidef)
649 648
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the top-level path itself are fatal
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Return True (and remember the dir) if it was not seen before;
            # uses samestat so symlink cycles are detected by inode identity.
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect cycles, so disable symlinks
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse through the symlink with cycle tracking
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
697 696
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        # shipped defaults come first so user/system files can override them
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
710 709
# module-level cache for rcpath(); computed lazily on first call
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                # empty entries are skipped, so HGRCPATH='' yields no paths
                if not p:
                    continue
                p = util.expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = osrcpath()
    return _rcpath
736 735
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    if rev is not None:
        return rev
    # the working directory is represented by the (large) wdirrev sentinel
    return wdirrev
743 742
def revsingle(repo, revspec, default='.'):
    '''Resolve ``revspec`` to a single changectx, or ``default`` if empty.'''
    if revspec or revspec == 0:
        l = revrange(repo, [revspec])
        if not l:
            raise error.Abort(_('empty revision set'))
        return repo[l.last()]
    return repo[default]
752 751
def _pairspec(revspec):
    # True if the top-level revset operator is a range form, meaning the
    # spec must resolve to a pair even when both ends are equal.
    tree = revset.parse(revspec)
    tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y"
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
757 756
def revpair(repo, revs):
    '''Resolve a list of revision specs to a (node, node-or-None) pair.

    Returns (p1, None) for an empty list; (node, None) when a single
    non-range spec resolves to one revision; otherwise the endpoints of
    the resolved set.'''
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick endpoints cheaply when the smartset knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
787 786
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    # integer specs are wrapped in rev() so matchany() sees only strings
    allspecs = [revset.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in revs]
    matcher = revset.matchany(repo.ui, allspecs, repo)
    return matcher(repo)
797 796
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() < intrev(ctx.rev()) - 1:
        return parents
    return []
813 812
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit kind prefix: never glob-expand
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # no match: keep the original pattern untouched
            ret.append(kindpat)
    return ret
832 831
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is resolved late; by the time the matcher calls this, the
        # assignment below has already run.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # an always-matcher means the patterns were effectively empty
        pats = []
    return m, pats
857 856
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # callers that don't need the expanded patterns use this thin wrapper
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
862 861
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
866 865
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
870 869
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backuproot = ui.config('ui', 'origbackuppath', None)
    if backuproot is None:
        return filepath + ".orig"

    # mirror the file's repo-relative path under the configured backup root
    pathfromroot = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backuproot, pathfromroot)

    backupdir = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(backupdir):
        ui.note(_('creating directory: %s\n') % backupdir)
        util.makedirs(backupdir)

    return fullorigpath + ".orig"
890 889
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Add new files and remove missing ones, recursing into subrepos.

    Returns 1 if any explicitly requested file was rejected or a subrepo
    reported a failure, 0 otherwise.'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # subrepo is covered if named exactly or by a file pattern inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only explicitly listed files count as rejections
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
954 953
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the badfn lambda closes over 'rejected' above
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report which files will be added and which removed
        newset = set(unknown + forgotten)
        display = newset.copy()
        display.update(deleted)
        for path in sorted(display):
            if path in newset:
                status = _('adding %s\n') % path
            else:
                status = _('removing %s\n') % path
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
983 982
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    auditor = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        if state == '?' and auditor.check(fname):
            # untracked and on a legal path: candidate for adding
            unknown.append(fname)
        elif state != 'r' and not st:
            # tracked but gone from disk
            deleted.append(fname)
        elif state == 'r' and st:
            # marked removed, yet still present on disk
            forgotten.append(fname)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
1012 1011
1013 1012 def _findrenames(repo, matcher, added, removed, similarity):
1014 1013 '''Find renames from removed files to added ones.'''
1015 1014 renames = {}
1016 1015 if similarity > 0:
1017 1016 for old, new, score in similar.findrenames(repo, added, removed,
1018 1017 similarity):
1019 1018 if (repo.ui.verbose or not matcher.exact(old)
1020 1019 or not matcher.exact(new)):
1021 1020 repo.ui.status(_('recording removal of %s as rename to %s '
1022 1021 '(%d%% similar)\n') %
1023 1022 (matcher.rel(old), matcher.rel(new),
1024 1023 score * 100))
1025 1024 renames[new] = old
1026 1025 return renames
1027 1026
def _markchanges(repo, unknown, deleted, renames):
    '''Record the pending changes: forget deleted files, add unknown ones and
    mark renamed files as copies, all under the working-dir lock.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
1037 1036
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy: drop the copy record instead of adding one
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source was just added: there is no committed data to copy from
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1056 1055
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for req in requirements:
        if req in supported:
            continue
        # a blank or non-alphanumeric-leading entry means the file is garbage
        if not req or not req[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(req)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1075 1074
def writerequires(opener, requirements):
    '''Write the requirements, sorted and one per line, to .hg/requires.'''
    lines = ['%s\n' % req for req in sorted(requirements)]
    with opener('requires', 'w') as fp:
        fp.write(''.join(lines))
1080 1079
class filecachesubentry(object):
    # Tracks the stat state of a single file so filecache can detect when the
    # file on disk has been replaced.  _cacheable is a tri-state: True/False
    # once known, None while undetermined.
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat the file, but only when stat data is usable for caching
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns a util.cachestat for path, or None when the file is missing
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1135 1134
class filecacheentry(object):
    """Aggregate stat state for the group of files backing one cached value."""
    def __init__(self, paths, stat=True):
        # one subentry per backing file
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1152 1151
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths whose stat info guards the cached value
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # invariant: name in obj.__dict__ implies name in obj._filecache
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

        obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the fast-path copy; the _filecache entry survives, so the
        # next __get__ will re-stat and recompute
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1228 1227
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    # Run cmd as a subprocess with the inherited lock's locker token exported
    # through envvar.  NOTE: intentionally mutates a caller-supplied environ
    # dict (the envvar key is added to it) before spawning the subprocess.
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1238 1237
def wlocksub(repo, cmd, *args, **kwargs):
    """Run cmd as a subprocess that may inherit repo's wlock.

    The wlock must already be held.  Accepts the same extra arguments as
    ui.system and returns the exit code of the subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1247 1246
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta', False):
        return True
    return ui.configbool('format', 'usegeneraldelta', True)
1254 1253
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta', False)
    return enabled
1260 1259
class delayclosedfile(object):
    """Proxy for a file object whose close is delayed.

    Do not instantiate outside of the vfs layer.
    """

    def __init__(self, fh, closer):
        # use object.__setattr__ so these two attributes land on the proxy
        # itself rather than being forwarded by our __setattr__ below
        object.__setattr__(self, '_origfh', fh)
        object.__setattr__(self, '_closer', closer)

    def __getattr__(self, attr):
        # forward all other attribute access to the wrapped file object
        return getattr(self._origfh, attr)

    def __setattr__(self, attr, value):
        return setattr(self._origfh, attr, value)

    def __delattr__(self, attr):
        return delattr(self._origfh, attr)

    def __enter__(self):
        return self._origfh.__enter__()

    def __exit__(self, exc_type, exc_value, exc_tb):
        # hand the real file to the background closer instead of closing inline
        self._closer.close(self._origfh)

    def close(self):
        self._closer.close(self._origfh)
1288 1287
class backgroundfilecloser(object):
    """Coordinates background closing of file handles on multiple threads.

    Used as a context manager; file handles are handed to close(), which
    either queues them for worker threads or closes them synchronously
    when backgrounding is disabled.
    """
    def __init__(self, ui, expectedcount=-1):
        self._running = False
        self._entered = False
        self._threads = []
        self._threadexception = None

        # Only Windows/NTFS has slow file closing. So only enable by default
        # on that platform. But allow to be enabled elsewhere for testing.
        defaultenabled = os.name == 'nt'
        enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)

        if not enabled:
            return

        # There is overhead to starting and stopping the background threads.
        # Don't do background processing unless the file count is large enough
        # to justify it.
        minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
                                    2048)
        # FUTURE dynamically start background threads after minfilecount closes.
        # (We don't currently have any callers that don't know their file count)
        if expectedcount > 0 and expectedcount < minfilecount:
            return

        # Windows defaults to a limit of 512 open files. A buffer of 128
        # should give us enough headway.
        maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
        threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)

        ui.debug('starting %d threads for background file closing\n' %
                 threadcount)

        # util.queue is the py2/py3-portable alias for Queue.Queue/queue.Queue
        self._queue = util.queue(maxsize=maxqueue)
        self._running = True

        for i in range(threadcount):
            t = threading.Thread(target=self._worker, name='backgroundcloser')
            self._threads.append(t)
            t.start()

    def __enter__(self):
        self._entered = True
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self._running = False

        # Wait for threads to finish closing so open files don't linger for
        # longer than lifetime of context manager.
        for t in self._threads:
            t.join()

    def _worker(self):
        """Main routine for worker thread."""
        while True:
            try:
                fh = self._queue.get(block=True, timeout=0.100)
                # Need to catch or the thread will terminate and
                # we could orphan file descriptors.
                try:
                    fh.close()
                except Exception as e:
                    # Stash so can re-raise from main thread later.
                    self._threadexception = e
            except util.empty:
                # queue drained; exit once the manager has shut us down
                if not self._running:
                    break

    def close(self, fh):
        """Schedule a file for closing."""
        if not self._entered:
            raise error.Abort('can only call close() when context manager '
                              'active')

        # If a background thread encountered an exception, raise now so we fail
        # fast. Otherwise we may potentially go on for minutes until the error
        # is acted on.
        if self._threadexception:
            e = self._threadexception
            self._threadexception = None
            raise e

        # If we're not actively running, close synchronously.
        if not self._running:
            fh.close()
            return

        self._queue.put(fh, block=True, timeout=None)
1379 1378
@@ -1,232 +1,232
1 1 #require test-repo
2 2
3 3 $ cd "$TESTDIR"/..
4 4
5 5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 6 doc/check-seclevel.py not using absolute_import
7 7 doc/gendoc.py not using absolute_import
8 8 doc/hgmanpage.py not using absolute_import
9 9 hgext/color.py not using absolute_import
10 10 hgext/eol.py not using absolute_import
11 11 hgext/extdiff.py not using absolute_import
12 12 hgext/factotum.py not using absolute_import
13 13 hgext/fetch.py not using absolute_import
14 14 hgext/fsmonitor/pywatchman/__init__.py not using absolute_import
15 15 hgext/fsmonitor/pywatchman/__init__.py requires print_function
16 16 hgext/fsmonitor/pywatchman/capabilities.py not using absolute_import
17 17 hgext/fsmonitor/pywatchman/pybser.py not using absolute_import
18 18 hgext/gpg.py not using absolute_import
19 19 hgext/graphlog.py not using absolute_import
20 20 hgext/hgcia.py not using absolute_import
21 21 hgext/hgk.py not using absolute_import
22 22 hgext/highlight/__init__.py not using absolute_import
23 23 hgext/highlight/highlight.py not using absolute_import
24 24 hgext/histedit.py not using absolute_import
25 25 hgext/largefiles/__init__.py not using absolute_import
26 26 hgext/largefiles/basestore.py not using absolute_import
27 27 hgext/largefiles/lfcommands.py not using absolute_import
28 28 hgext/largefiles/lfutil.py not using absolute_import
29 29 hgext/largefiles/localstore.py not using absolute_import
30 30 hgext/largefiles/overrides.py not using absolute_import
31 31 hgext/largefiles/proto.py not using absolute_import
32 32 hgext/largefiles/remotestore.py not using absolute_import
33 33 hgext/largefiles/reposetup.py not using absolute_import
34 34 hgext/largefiles/uisetup.py not using absolute_import
35 35 hgext/largefiles/wirestore.py not using absolute_import
36 36 hgext/mq.py not using absolute_import
37 37 hgext/rebase.py not using absolute_import
38 38 hgext/share.py not using absolute_import
39 39 hgext/win32text.py not using absolute_import
40 40 i18n/check-translation.py not using absolute_import
41 41 i18n/polib.py not using absolute_import
42 42 setup.py not using absolute_import
43 43 tests/heredoctest.py requires print_function
44 44 tests/killdaemons.py not using absolute_import
45 45 tests/md5sum.py not using absolute_import
46 46 tests/mockblackbox.py not using absolute_import
47 47 tests/printenv.py not using absolute_import
48 48 tests/readlink.py not using absolute_import
49 49 tests/readlink.py requires print_function
50 50 tests/revlog-formatv0.py not using absolute_import
51 51 tests/run-tests.py not using absolute_import
52 52 tests/sitecustomize.py not using absolute_import
53 53 tests/svn-safe-append.py not using absolute_import
54 54 tests/svnxml.py not using absolute_import
55 55 tests/test-atomictempfile.py not using absolute_import
56 56 tests/test-demandimport.py not using absolute_import
57 57 tests/test-demandimport.py requires print_function
58 58 tests/test-doctest.py not using absolute_import
59 59 tests/test-hgwebdir-paths.py not using absolute_import
60 60 tests/test-lrucachedict.py not using absolute_import
61 61 tests/test-lrucachedict.py requires print_function
62 62 tests/test-manifest.py not using absolute_import
63 63 tests/test-pathencode.py not using absolute_import
64 64 tests/test-pathencode.py requires print_function
65 65 tests/test-run-tests.py not using absolute_import
66 66 tests/test-simplemerge.py not using absolute_import
67 67 tests/test-symlink-os-yes-fs-no.py not using absolute_import
68 68 tests/test-trusted.py not using absolute_import
69 69 tests/test-trusted.py requires print_function
70 70 tests/test-ui-color.py not using absolute_import
71 71 tests/test-url.py not using absolute_import
72 72
73 73 #if py3exe
74 74 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs $PYTHON3 contrib/check-py3-compat.py
75 75 contrib/check-code.py: invalid syntax: (unicode error) 'unicodeescape' codec can't decode bytes in position *-*: malformed \N character escape (<unknown>, line *) (glob)
76 76 doc/hgmanpage.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
77 77 hgext/acl.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
78 78 hgext/automv.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
79 79 hgext/blackbox.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
80 80 hgext/bugzilla.py: error importing module: <ImportError> No module named 'urlparse' (line *) (glob)
81 81 hgext/censor.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
82 82 hgext/chgserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
83 83 hgext/children.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
84 84 hgext/churn.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
85 85 hgext/clonebundles.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
86 86 hgext/color.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
87 87 hgext/convert/bzr.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
88 88 hgext/convert/common.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
89 89 hgext/convert/convcmd.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
90 90 hgext/convert/cvs.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
91 91 hgext/convert/cvsps.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
92 92 hgext/convert/darcs.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
93 93 hgext/convert/filemap.py: error importing module: <SystemError> Parent module 'hgext.convert' not loaded, cannot perform relative import (line *) (glob)
94 94 hgext/convert/git.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
95 95 hgext/convert/gnuarch.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
96 96 hgext/convert/hg.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
97 97 hgext/convert/monotone.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
98 98 hgext/convert/p*.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
99 99 hgext/convert/subversion.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
100 100 hgext/convert/transport.py: error importing module: <ImportError> No module named 'svn.client' (line *) (glob)
101 101 hgext/eol.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
102 102 hgext/extdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
103 103 hgext/factotum.py: error importing: <ImportError> No module named 'cStringIO' (error at url.py:*) (glob)
104 104 hgext/fetch.py: error importing module: <SyntaxError> invalid syntax (commands.py, line *) (line *) (glob)
105 105 hgext/fsmonitor/state.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
106 106 hgext/fsmonitor/watchmanclient.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
107 107 hgext/gpg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
108 108 hgext/graphlog.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
109 109 hgext/hgcia.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
110 110 hgext/hgk.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
111 111 hgext/highlight/highlight.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
112 112 hgext/histedit.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
113 113 hgext/keyword.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
114 114 hgext/largefiles/basestore.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
115 115 hgext/largefiles/lfcommands.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
116 116 hgext/largefiles/lfutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
117 117 hgext/largefiles/localstore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
118 118 hgext/largefiles/overrides.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
119 119 hgext/largefiles/proto.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
120 120 hgext/largefiles/remotestore.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
121 121 hgext/largefiles/reposetup.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
122 122 hgext/largefiles/uisetup.py: error importing module: <SyntaxError> invalid syntax (archival.py, line *) (line *) (glob)
123 123 hgext/largefiles/wirestore.py: error importing module: <ImportError> No module named 'lfutil' (line *) (glob)
124 124 hgext/mq.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
125 125 hgext/notify.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
126 126 hgext/pager.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
127 127 hgext/patchbomb.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
128 128 hgext/purge.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
129 129 hgext/rebase.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
130 130 hgext/record.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
131 131 hgext/relink.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
132 132 hgext/schemes.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
133 133 hgext/share.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
134 134 hgext/shelve.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
135 135 hgext/strip.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
136 136 hgext/transplant.py: error importing: <SyntaxError> invalid syntax (bundle*.py, line *) (error at bundlerepo.py:*) (glob)
137 137 hgext/win*text.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
138 138 mercurial/archival.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
139 139 mercurial/bookmarks.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
140 mercurial/branchmap.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
140 mercurial/branchmap.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
141 141 mercurial/bundle*.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
142 142 mercurial/bundlerepo.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
143 143 mercurial/byterange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
144 mercurial/changegroup.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
144 mercurial/changegroup.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
145 145 mercurial/changelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
146 146 mercurial/cmdutil.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
147 147 mercurial/commands.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
148 148 mercurial/commandserver.py: error importing module: <ImportError> No module named 'SocketServer' (line *) (glob)
149 149 mercurial/config.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
150 150 mercurial/context.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
151 151 mercurial/copies.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
152 152 mercurial/crecord.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
153 153 mercurial/destutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
154 154 mercurial/dirstate.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
155 155 mercurial/discovery.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
156 156 mercurial/dispatch.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
157 157 mercurial/exchange.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
158 158 mercurial/extensions.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
159 159 mercurial/filelog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
160 160 mercurial/filemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
161 161 mercurial/fileset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
162 162 mercurial/formatter.py: error importing module: <ImportError> No module named 'cPickle' (line *) (glob)
163 163 mercurial/graphmod.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
164 164 mercurial/help.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
165 165 mercurial/hg.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
166 166 mercurial/hgweb/common.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
167 167 mercurial/hgweb/hgweb_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
168 168 mercurial/hgweb/hgwebdir_mod.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
169 169 mercurial/hgweb/protocol.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
170 170 mercurial/hgweb/request.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
171 171 mercurial/hgweb/server.py: error importing module: <ImportError> No module named 'BaseHTTPServer' (line *) (glob)
172 172 mercurial/hgweb/webcommands.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
173 173 mercurial/hgweb/webutil.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
174 174 mercurial/hgweb/wsgicgi.py: error importing module: <SystemError> Parent module 'mercurial.hgweb' not loaded, cannot perform relative import (line *) (glob)
175 175 mercurial/hook.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
176 176 mercurial/httpclient/_readers.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
177 177 mercurial/httpconnection.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
178 178 mercurial/httppeer.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
179 179 mercurial/keepalive.py: error importing module: <ImportError> No module named 'httplib' (line *) (glob)
180 180 mercurial/localrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
181 181 mercurial/lock.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
182 182 mercurial/mail.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
183 183 mercurial/manifest.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
184 184 mercurial/match.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
185 185 mercurial/mdiff.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
186 186 mercurial/merge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
187 187 mercurial/minirst.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
188 188 mercurial/namespaces.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
189 189 mercurial/obsolete.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
190 190 mercurial/patch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
191 191 mercurial/pathutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
192 192 mercurial/peer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
193 193 mercurial/pure/mpatch.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
194 194 mercurial/pure/parsers.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
195 195 mercurial/pushkey.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
196 196 mercurial/pvec.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
197 197 mercurial/registrar.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
198 198 mercurial/repair.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
199 199 mercurial/repoview.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
200 200 mercurial/revlog.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
201 201 mercurial/revset.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
202 mercurial/scmutil.py: error importing module: <ImportError> No module named 'Queue' (line *) (glob)
202 mercurial/scmutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
203 203 mercurial/scmwindows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
204 204 mercurial/similar.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
205 205 mercurial/simplemerge.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
206 206 mercurial/sshpeer.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
207 207 mercurial/sshserver.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
208 208 mercurial/sslutil.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
209 209 mercurial/statichttprepo.py: error importing module: <ImportError> No module named 'urllib2' (line *) (glob)
210 210 mercurial/store.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
211 mercurial/streamclone.py: error importing: <ImportError> No module named 'Queue' (error at scmutil.py:*) (glob)
211 mercurial/streamclone.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
212 212 mercurial/subrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at cmdutil.py:*) (glob)
213 213 mercurial/tagmerge.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
214 214 mercurial/tags.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
215 215 mercurial/templatefilters.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
216 216 mercurial/templatekw.py: error importing: <ImportError> No module named 'cStringIO' (error at patch.py:*) (glob)
217 217 mercurial/templater.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
218 218 mercurial/transaction.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
219 219 mercurial/ui.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
220 220 mercurial/unionrepo.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
221 221 mercurial/url.py: error importing module: <ImportError> No module named 'cStringIO' (line *) (glob)
222 222 mercurial/util.py: error importing: <ImportError> No module named 'cStringIO' (error at parsers.py:*) (glob)
223 223 mercurial/verify.py: error importing: <ImportError> No module named 'cStringIO' (error at mpatch.py:*) (glob)
224 224 mercurial/win*.py: error importing module: <ImportError> No module named 'msvcrt' (line *) (glob)
225 225 mercurial/windows.py: error importing module: <ImportError> No module named '_winreg' (line *) (glob)
226 226 mercurial/wireproto.py: error importing module: <SyntaxError> invalid syntax (bundle*.py, line *) (line *) (glob)
227 227 tests/readlink.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
228 228 tests/test-demandimport.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
229 229 tests/test-lrucachedict.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
230 230 tests/test-trusted.py: invalid syntax: invalid syntax (<unknown>, line *) (glob)
231 231
232 232 #endif
General Comments 0
You need to be logged in to leave comments. Login now