##// END OF EJS Templates
date: reallow negative timestamp, fix for Windows buggy gmtime() (issue2513)...
Florent Gallaire -
r28825:87c6ad22 default
parent child Browse files
Show More
@@ -1,2742 +1,2741
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import bz2
19 19 import calendar
20 20 import collections
21 21 import datetime
22 22 import errno
23 23 import gc
24 24 import hashlib
25 25 import imp
26 26 import os
27 27 import re as remod
28 28 import shutil
29 29 import signal
30 30 import socket
31 31 import subprocess
32 32 import sys
33 33 import tempfile
34 34 import textwrap
35 35 import time
36 36 import traceback
37 37 import urllib
38 38 import zlib
39 39
40 40 from . import (
41 41 encoding,
42 42 error,
43 43 i18n,
44 44 osutil,
45 45 parsers,
46 46 pycompat,
47 47 )
48 48
# re-export selected names from pycompat so the rest of this module (and
# its importers) can use them without caring about the Python version
for attr in (
    'empty',
    'queue',
):
    globals()[attr] = getattr(pycompat, attr)
54 54
55 55 if os.name == 'nt':
56 56 from . import windows as platform
57 57 else:
58 58 from . import posix as platform
59 59
60 60 md5 = hashlib.md5
61 61 sha1 = hashlib.sha1
62 62 sha512 = hashlib.sha512
63 63 _ = i18n._
64 64
65 65 cachestat = platform.cachestat
66 66 checkexec = platform.checkexec
67 67 checklink = platform.checklink
68 68 copymode = platform.copymode
69 69 executablepath = platform.executablepath
70 70 expandglobs = platform.expandglobs
71 71 explainexit = platform.explainexit
72 72 findexe = platform.findexe
73 73 gethgcmd = platform.gethgcmd
74 74 getuser = platform.getuser
75 75 getpid = os.getpid
76 76 groupmembers = platform.groupmembers
77 77 groupname = platform.groupname
78 78 hidewindow = platform.hidewindow
79 79 isexec = platform.isexec
80 80 isowner = platform.isowner
81 81 localpath = platform.localpath
82 82 lookupreg = platform.lookupreg
83 83 makedir = platform.makedir
84 84 nlinks = platform.nlinks
85 85 normpath = platform.normpath
86 86 normcase = platform.normcase
87 87 normcasespec = platform.normcasespec
88 88 normcasefallback = platform.normcasefallback
89 89 openhardlinks = platform.openhardlinks
90 90 oslink = platform.oslink
91 91 parsepatchoutput = platform.parsepatchoutput
92 92 pconvert = platform.pconvert
93 93 poll = platform.poll
94 94 popen = platform.popen
95 95 posixfile = platform.posixfile
96 96 quotecommand = platform.quotecommand
97 97 readpipe = platform.readpipe
98 98 rename = platform.rename
99 99 removedirs = platform.removedirs
100 100 samedevice = platform.samedevice
101 101 samefile = platform.samefile
102 102 samestat = platform.samestat
103 103 setbinary = platform.setbinary
104 104 setflags = platform.setflags
105 105 setsignalhandler = platform.setsignalhandler
106 106 shellquote = platform.shellquote
107 107 spawndetached = platform.spawndetached
108 108 split = platform.split
109 109 sshargs = platform.sshargs
110 110 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
111 111 statisexec = platform.statisexec
112 112 statislink = platform.statislink
113 113 termwidth = platform.termwidth
114 114 testpid = platform.testpid
115 115 umask = platform.umask
116 116 unlink = platform.unlink
117 117 unlinkpath = platform.unlinkpath
118 118 username = platform.username
119 119
# Python compatibility

_notset = object() # unique sentinel; distinguishes "missing" from None

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
128 128
def safehasattr(thing, attr):
    """Return True if ``thing`` has an attribute named ``attr``.

    Uses getattr() with a unique sentinel default, so an attribute whose
    value is None (or any other falsy value) is still reported as present.
    """
    sentinel = _notset
    return getattr(thing, attr, sentinel) is not sentinel
131 131
# supported digest algorithms, by wire name
DIGESTS = {
    'md5': md5,
    'sha1': sha1,
    'sha512': sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every entry in the preference list must be implemented
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS
142 142
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        """digests: iterable of digest names; s: optional initial data"""
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """feed ``data`` into every underlying hash object"""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            # report 'key', not 'k': the old code used 'k', a name that only
            # resolved to the stale module-level loop variable, so the error
            # message always named the wrong digest type.
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
189 189
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """read from the wrapped handle, feeding the digests as we go"""
        data = self._fh.read(length)
        self._got += len(data)
        self._digester.update(data)
        return data

    def validate(self):
        """raise Abort unless both size and every digest match"""
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
221 221
# Python 3 has no 'buffer' builtin; define an equivalent so callers can use
# buffer() unconditionally on either major version.
try:
    buffer = buffer
except NameError:
    if sys.version_info[0] < 3:
        def buffer(sliceable, offset=0):
            # plain slice copy stands in for buffer() on Python 2
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0):
            # memoryview slice: a view, not a copy, on Python 3
            return memoryview(sliceable)[offset:]

# child processes should not inherit our file descriptors on POSIX
closefds = os.name == 'posix'

# bytes read from a pipe per os.read() call (see bufferedinputpipe)
_chunksize = 4096
235 235
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input
        # list of buffered chunks, oldest first; joined lazily
        self._buffer = []
        self._eof = False
        # total number of buffered bytes across all chunks
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # delegate to the wrapped pipe
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        """return up to 'size' bytes, reading from the pipe as needed"""
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        """return one line including the trailing '\\n' (or whatever
        remains at EOF when no newline is found)"""
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                # only the newest chunk can hold a not-yet-seen newline
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse every chunk into a single string
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            # keep the unconsumed tail as the single remaining chunk
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
329 329
def popen2(cmd, env=None, newlines=False):
    """spawn ``cmd`` in a shell; return its (stdin, stdout) pipe pair.

    Setting bufsize to -1 lets the system decide the buffer size.
    The default for bufsize is 0, meaning unbuffered. This leads to
    poor performance on Mac OS X: http://bugs.python.org/issue4194
    """
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
340 340
def popen3(cmd, env=None, newlines=False):
    """spawn ``cmd`` in a shell; return (stdin, stdout, stderr) pipes.

    Like popen4, but the Popen object itself is not exposed.
    """
    pipes = popen4(cmd, env, newlines)
    return pipes[0], pipes[1], pipes[2]
344 344
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """spawn ``cmd`` in a shell; return (stdin, stdout, stderr, proc)."""
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
353 353
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module (e.g. running from a raw checkout)
        return 'unknown'
    return __version__.version
361 361
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    ``v`` is the version string to parse; when omitted or empty the
    current Mercurial version string is used instead.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)
    """
    if not v:
        v = version()
    # everything after the first '+' is free-form "extra" information
    vparts, plus, extra = v.partition('+')
    if not plus:
        extra = None

    vints = []
    for piece in vparts.split('.'):
        try:
            vints.append(int(piece))
        except ValueError:
            break
    # pad to three numeric components: (3, 6) -> (3, 6, None)
    vints.extend([None] * (3 - len(vints)))

    if n == 2:
        return tuple(vints[:2])
    if n == 3:
        return tuple(vints[:3])
    if n == 4:
        return tuple(vints[:3]) + (extra,)
414 414
# used by parsedate
defaultdateformats = (
    # ISO-8601 style, with and without time of day
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    # 'date'-style formats spelling out month names
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    # time-of-day only
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# broader (more ambiguous) formats, only used on explicit user request
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
449 449
def cachefunc(func):
    '''cache the result of function calls

    Results are memoized forever, keyed on the positional arguments.
    '''
    # XXX doesn't handle keywords args
    # func.__code__ is the modern spelling of func.func_code: it exists on
    # Python 2.6+ and is the only spelling available on Python 3.
    if func.__code__.co_argcount == 0:
        # nullary function: a one-element list is enough
        cache = []
        def f():
            if len(cache) == 0:
                cache.append(func())
            return cache[0]
        return f
    cache = {}
    if func.__code__.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
475 475
class sortdict(dict):
    '''a simple sorted dictionary

    Iteration follows insertion order; re-assigning an existing key moves
    it to the end.
    '''
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        if key in self:
            # move an existing key to the end
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        # return the popped value (or the supplied default); the previous
        # implementation dropped dict.pop()'s result and always returned
        # None, unlike every other dict
        value = dict.pop(self, key, *args, **kwargs)
        try:
            self._list.remove(key)
        except ValueError:
            # key was absent and a default was given
            pass
        return value
    def keys(self):
        return self._list
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
520 520
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    __slots__ = ('next', 'prev', 'key', 'value')

    def __init__(self):
        # not yet linked into any list, not yet holding an entry
        self.prev = None
        self.next = None
        self.value = None
        self.key = _notset

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
539 539
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        """Return the value for ``k``, or ``default`` when it is absent.

        A hit counts as an access and promotes the entry to most recently
        used. (The previous implementation returned the internal linked
        list node - not the stored value - and did not update recency.)
        """
        try:
            return self[k]
        except KeyError:
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
698 698
def lrucachefunc(func):
    '''cache most recent results of function calls

    At most ~20 results are retained; the least recently used entry is
    evicted when the cache grows past that.
    '''
    cache = {}
    order = collections.deque()
    # func.__code__ is the modern spelling of func.func_code: it exists on
    # Python 2.6+ and is the only spelling available on Python 3.
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                # refresh recency on a hit
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
725 725
class propertycache(object):
    """Descriptor computing an attribute on first access and caching it.

    The computed value is stored in the instance __dict__ under the
    function's name, so later lookups bypass this descriptor entirely.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
738 738
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, stderr = proc.communicate(s)
    return stdout
745 745
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname = outname = None
    try:
        # write the input to a temp file CMD can read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        # reserve an output file for CMD to write into
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort removal of both temp files
        for name in (inname, outname):
            try:
                if name:
                    os.unlink(name)
            except OSError:
                pass
779 779
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # a recognized prefix selects a specific filter strategy
    for prefix, fn in filtertable.iteritems():
        if not cmd.startswith(prefix):
            continue
        return fn(s, cmd[len(prefix):].lstrip())
    # no prefix: default to a plain pipe
    return pipefilter(s, cmd)
791 791
def binary(s):
    """return true if a string is binary data"""
    # empty (or None) input is never considered binary
    if not s:
        return False
    return '\0' in s
795 795
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def _floorlog2(x):
        # floor(log2(x)) for x > 0; 0 when x is 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    pendinglen = 0
    for chunk in source:
        pending.append(chunk)
        pendinglen += len(chunk)
        if pendinglen < min:
            continue
        if min < max:
            # double the threshold, jumping straight to (a power of two
            # near) the size actually seen if bigger, capped at max
            min = min << 1
            seen = 1 << _floorlog2(pendinglen)
            if seen > min:
                min = seen
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendinglen = 0
    if pending:
        yield ''.join(pending)
826 826
827 827 Abort = error.Abort
828 828
def always(fn):
    """return True regardless of ``fn``"""
    return True
831 831
def never(fn):
    """return False regardless of ``fn``"""
    return False
834 834
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only restore collection if it was on when we entered
            if wasenabled:
                gc.enable()
    return wrapper
856 856
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        # nothing to be relative to: just localize n2
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(a)) + b) or '.'
882 882
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"): # new py2exe
        return True
    if safehasattr(sys, "importers"): # old py2exe
        return True
    return imp.is_frozen("__main__") # tools/freeze
892 892
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(sys.executable)
else:
    datapath = os.path.dirname(__file__)

# tell the i18n layer where to find its message catalogs
i18n.setdatapath(datapath)

# cached path of the 'hg' executable; lazily filled by hgexecutable()
_hgexecutable = None
903 903
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is not None:
        return _hgexecutable
    # not cached yet: figure it out once, via _sethgexecutable
    envhg = os.environ.get('HG')
    mainmod = sys.modules['__main__']
    if envhg:
        _sethgexecutable(envhg)
    elif mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            _sethgexecutable(os.environ['EXECUTABLEPATH'])
        else:
            _sethgexecutable(sys.executable)
    elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
        _sethgexecutable(mainmod.__file__)
    else:
        _sethgexecutable(findexe('hg') or os.path.basename(sys.argv[0]))
    return _hgexecutable
926 926
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # updates the module-level cache read back by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
931 931
932 932 def _isstdout(f):
933 933 fileno = getattr(f, 'fileno', None)
934 934 return fileno and fileno() == sys.__stdout__.fileno()
935 935
def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status, else raise onerr
    object as exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    if environ is None:
        environ = {}
    try:
        # flush our buffered output before the child writes to the terminal
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    # keep the unquoted command around for error reporting
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        # let spawned commands re-invoke the same hg binary
        env['HG'] = hgexecutable()
        if out is None or _isstdout(out):
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture both streams and copy them line by line into 'out'
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        # NOTE(review): OpenVMS appears to signal success via the low status
        # bit, hence treating odd statuses as 0 — confirm on that platform
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        raise onerr(errmsg)
    return rc
994 994
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the call itself failed (bad
            # signature) rather than code inside func raising TypeError
            tb = traceback.extract_tb(sys.exc_info()[2])
            if len(tb) == 1:
                raise error.SignatureError
            raise

    return check
1006 1006
def copyfile(src, dest, hardlink=False, copystat=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime'''
    if os.path.lexists(dest):
        # never write through an existing destination; replace it
        unlink(dest)
    # hardlinks are problematic on CIFS, quietly ignore this flag
    # until we find a way to work around it cleanly (issue4546)
    # (the 'if False' deliberately disables the branch below)
    if False and hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the link itself rather than copying its target
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
        except shutil.Error as inst:
            raise Abort(str(inst))
1034 1034
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    if hardlink is None:
        # only attempt hardlinking when source and destination live on
        # the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    topic = _('linking') if hardlink else _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # shift child progress by the number of entries already done
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink,
                                    progress=nprog)
            num += n
    else:
        if not hardlink:
            shutil.copy(src, dst)
        else:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed once; stop trying for the rest of the tree
                hardlink = False
                shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1071 1071
# Basenames that the Windows kernel reserves regardless of extension
# (both "con" and "con.xml" are invalid).
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# Characters that may not appear anywhere in a Windows filename.
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # check each path component separately; both separators are accepted
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in n:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # ASCII control characters are rejected by Windows
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # Windows silently drops a trailing dot or space; "." and ".."
        # are allowed as components ('n not in ".."' covers both)
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1122 1122
# On Windows every filename must pass the Windows-specific checks above;
# other platforms delegate to their platform module's implementation.
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
1127 1127
def makelock(info, pathname):
    """Create a lock at pathname whose content is info.

    A symlink pointing at info is preferred; when the platform has no
    symlinks (or the attempt fails for a reason other than the lock
    already existing), fall back to an exclusively-created regular file.
    """
    try:
        return os.symlink(info, pathname)
    except AttributeError: # no symlink in os
        pass
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise

    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL
    fd = os.open(pathname, flags)
    os.write(fd, info)
    os.close(fd)
1140 1140
def readlock(pathname):
    """Return the content of a lock created by makelock."""
    try:
        return os.readlink(pathname)
    except AttributeError: # no symlink in os
        pass
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    # the lock is a regular file, not a symlink
    fp = posixfile(pathname)
    try:
        return fp.read()
    finally:
        fp.close()
1153 1153
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available; stat by name instead
        return os.stat(fp.name)
    return os.fstat(fd)
1160 1160
1161 1161 # File system features
1162 1162
def checkcase(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    folded = b.upper()
    if folded == b:
        folded = b.lower()
    if folded == b:
        return True # no evidence against case sensitivity
    try:
        s2 = os.lstat(os.path.join(d, folded))
    except OSError:
        # the folded name does not exist: the filesystem distinguishes case
        return True
    # same stat result means both spellings name the same file
    return s2 != s1
1185 1185
1186 1186 try:
1187 1187 import re2
1188 1188 _re2 = None
1189 1189 except ImportError:
1190 1190 _re2 = False
1191 1191
class _re(object):
    # Wrapper that transparently prefers the optional re2 engine and
    # falls back to the stdlib re module when re2 is absent or broken.
    def _checkre2(self):
        # probe re2 once and cache the verdict in the module-global _re2
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes its flags inline in the pattern, not as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 does not support; fall back
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

# module-level singleton used throughout in place of the re module
re = _re()
1236 1236
1237 1237 _fspathcache = {}
1238 1238 def fspath(name, root):
1239 1239 '''Get name in the case stored in the filesystem
1240 1240
1241 1241 The name should be relative to root, and be normcase-ed for efficiency.
1242 1242
1243 1243 Note that this function is unnecessary, and should not be
1244 1244 called, for case-sensitive filesystems (simply because it's expensive).
1245 1245
1246 1246 The root should be normcase-ed, too.
1247 1247 '''
1248 1248 def _makefspathcacheentry(dir):
1249 1249 return dict((normcase(n), n) for n in os.listdir(dir))
1250 1250
1251 1251 seps = os.sep
1252 1252 if os.altsep:
1253 1253 seps = seps + os.altsep
1254 1254 # Protect backslashes. This gets silly very quickly.
1255 1255 seps.replace('\\','\\\\')
1256 1256 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1257 1257 dir = os.path.normpath(root)
1258 1258 result = []
1259 1259 for part, sep in pattern.findall(name):
1260 1260 if sep:
1261 1261 result.append(sep)
1262 1262 continue
1263 1263
1264 1264 if dir not in _fspathcache:
1265 1265 _fspathcache[dir] = _makefspathcacheentry(dir)
1266 1266 contents = _fspathcache[dir]
1267 1267
1268 1268 found = contents.get(part)
1269 1269 if not found:
1270 1270 # retry "once per directory" per "dirstate.walk" which
1271 1271 # may take place for each patches of "hg qpush", for example
1272 1272 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1273 1273 found = contents.get(part)
1274 1274
1275 1275 result.append(found or part)
1276 1276 dir = os.path.join(dir, part)
1277 1277
1278 1278 return ''.join(result)
1279 1279
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        # leftover probe file from a previous run: be conservative
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        # cannot create files here at all; report nlink as unreliable
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        # always remove both probe files, ignoring races
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
1311 1311
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # note: falsy non-bool when os.altsep is unset, matching the original
    return os.altsep and path.endswith(os.altsep)
1315 1315
def splitpath(path):
    '''Split path on os.sep (os.altsep is deliberately ignored, making
    this a plain alternative to "path.split(os.sep)").

    Run the path through os.path.normpath() first if a normalized
    result is needed.'''
    return path.split(os.sep)
1323 1323
def gui():
    '''Are we running in a GUI?'''
    # note: may return a truthy string (the DISPLAY value) rather than a
    # bool; callers use the result in boolean context only
    if sys.platform == 'darwin':
        if 'SSH_CONNECTION' in os.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        return os.name == "nt" or os.environ.get("DISPLAY")
1338 1338
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # original vanished: the empty temp file is still usable
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # best-effort cleanup of the half-written temp file
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1377 1377
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # 'w' mode means the original content will be thrown away anyway,
        # so skip copying it into the temp file
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # the rename publishes all buffered writes atomically
        if not self._fp.closed:
            self._fp.close()
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        # drop the temporary copy without touching the original file
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
1415 1415
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance"""
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # already exists: leave its mode untouched
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without an existing ancestor
            raise
        # create the missing ancestors first (each created level is also
        # chmod-ed via the recursion), then retry this level
        makedirs(parent, mode, notindexed)
        makedir(name, notindexed)
    if mode is not None:
        os.chmod(name, mode)
1432 1432
def ensuredirs(name, mode=None, notindexed=False):
    """race-safe recursive directory creation

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    if os.path.isdir(name):
        return
    parent = os.path.dirname(os.path.abspath(name))
    if parent != name:
        ensuredirs(parent, mode, notindexed)
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno != errno.EEXIST or not os.path.isdir(name):
            raise
        # someone else seems to have won a directory creation race
        return
    if mode is not None:
        os.chmod(name, mode)
1454 1454
def readfile(path):
    """Return the whole binary content of path."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1458 1458
def writefile(path, text):
    """Replace the content of path with text (binary mode)."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1462 1462
def appendfile(path, text):
    """Append text to the end of path (binary mode)."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1466 1466
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-slice any chunk over 1MB into 256KB pieces so a single
            # huge chunk cannot dominate memory use
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # read position inside the chunk at the head of the queue
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1547 1547
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits without touching the file
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1568 1568
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        # a clock before the epoch is almost certainly misconfigured
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    # offset = UTC wall clock minus local wall clock, in seconds
    utcwall = datetime.datetime.utcfromtimestamp(timestamp)
    localwall = datetime.datetime.fromtimestamp(timestamp)
    delta = utcwall - localwall
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz
1581 1581
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string.

    %1 and %2 in the format expand to the hour and minute parts of the
    UTC offset; %z is accepted as an alias for %1%2. Negative unixtimes
    (pre-1970 dates) are rendered correctly.
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # tz counts seconds *west* of UTC, hence the inverted sign
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    # clamp the local-time value into signed 32 bits rather than failing
    d = t - tz
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x7fffffff:
        d = -0x7fffffff
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = t.strftime(format)
    return s
1605 1606
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date (YYYY-MM-DD)."""
    return datestr(date, format='%Y-%m-%d')
1609 1610
def parsetimezone(tz):
    """parse a timezone string and return an offset integer"""
    if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
        sign = 1 if tz[0] == "+" else -1
        hours, minutes = int(tz[1:3]), int(tz[3:5])
        # internal offsets count seconds west of UTC, hence the negation
        return -sign * (hours * 60 + minutes) * 60
    if tz in ("GMT", "UTC"):
        return 0
    # not a recognizable timezone
    return None
1620 1621
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    # defaults is read-only here, keyed by part name ("S", "M", ...) with
    # (biased, today) value pairs — presumably always supplied by
    # parsedate; the [] default is never indexed in that case
    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string.split()[-1]), string
    if offset is not None:
        # last token was a timezone; strip it from the date text
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1650 1651
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    Negative unixtimes (pre-1970 dates) are accepted as long as they fit
    in signed 32 bits.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # raw "unixtime offset" form
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1729 1728
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # bias unspecified fields toward the earliest matching moment
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # bias unspecified fields toward the latest matching moment,
        # trying longer month lengths first
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_('%s must be nonnegative (see "hg help dates")')
                % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit inclusive range
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a bare date matches the whole span it denotes
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1805 1804
def stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        body = pattern[3:]
        try:
            regex = remod.compile(body)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', body, regex.search
    if pattern.startswith('literal:'):
        pattern = pattern[8:]
    # default: exact string equality
    return 'literal', pattern, pattern.__eq__
1844 1843
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop the mail domain, then anything before '<', then truncate at
    # the first space or dot
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx >= 0:
            user = user[:idx]
    return user
1860 1859
def emailuser(user):
    """Return the user portion of an email address."""
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    bracket = user.find('<')
    if bracket >= 0:
        user = user[bracket + 1:]
    return user
1870 1869
def email(author):
    '''get email of author.'''
    # slice between '<' and '>'; with neither present this returns the
    # whole string unchanged
    end = author.find('>')
    stop = None if end == -1 else end
    start = author.find('<') + 1
    return author[start:stop]
1877 1876
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # delegate to encoding.trim so wide/multi-byte characters are counted
    # by display column rather than by byte
    return encoding.trim(text, maxlength, ellipsis='...')
1881 1880
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity

    unittable is a sequence of (multiplier, divisor, format) entries,
    ordered from largest to smallest unit.'''

    def render(count):
        for multiplier, divisor, fmt in unittable:
            # use the first unit whose threshold the count reaches
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # fall back to the smallest unit
        return unittable[-1][2] % count

    return render
1892 1891
# render a byte count as a short human-readable string; the thresholds are
# chosen so that at most three significant digits are ever shown
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1905 1904
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    r = repr(s)
    return r.replace('\\\\', '\\')
1909 1908
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Build (once) and instantiate a column-width-aware TextWrapper.

    Acts as a factory: the first call defines the subclass and replaces
    this function in the module namespace with the class itself, so the
    class body is only constructed once.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr so the head occupies at most space_left columns.
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # Cache: replace this factory with the class so it is built only once.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2013 2012
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte string to 'width' display columns.

    initindent prefixes the first line, hangindent all subsequent lines.
    The text is decoded to unicode so wide characters are measured
    correctly, then re-encoded before returning.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(encoding.encoding, encoding.encodingmode)
    initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
    hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(encoding.encoding)
2026 2025
def iterlines(iterator):
    """Yield each line of every chunk produced by iterator."""
    for chunk in iterator:
        lines = chunk.splitlines()
        for line in lines:
            yield line
2031 2030
def expandpath(path):
    """Expand $VAR environment references, then ~user, in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2034 2033
def hgcmd():
    """Return the command used to execute current hg, as an argv list.

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [os.environ['EXECUTABLEPATH']]
        else:
            return [sys.executable]
    # not frozen: delegate to the platform-specific lookup
    return gethgcmd()
2049 2048
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the child; os.wait() returns a (pid, status) tuple
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)  # not present on Windows
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # NOTE(review): 'terminated' holds (pid, status) tuples, so
            # 'pid in terminated' never matches a bare pid; the testpid()
            # check appears to do the real liveness work -- confirm.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            # restore the caller's SIGCHLD disposition
            signal.signal(signal.SIGCHLD, prevhandler)
2084 2083
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    if fn is None:
        fn = lambda text: text
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # let a doubled prefix stand for a literal prefix character
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(r'%s(%s)' % (prefix, patterns))
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
2109 2108
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall through to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2126 2125
# Recognized boolean spellings (keys are lowercase); used by parsebool().
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}
2130 2129
def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # lookup is case-insensitive; unknown spellings fall back to None
    return _booleans.get(s.lower())
2137 2136
_hexdig = '0123456789ABCDEFabcdef'
# Precomputed table of every two-hex-digit pair -> its character, used by
# _urlunquote() for fast percent-decoding without repeated int() calls.
_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in _hexdig for b in _hexdig)
2141 2140
def _urlunquote(s):
    """Decode HTTP/HTML % encoding.

    >>> _urlunquote('abc%20def')
    'abc def'
    """
    fragments = s.split('%')
    if len(fragments) == 1:
        # nothing encoded: fast path
        return s
    out = fragments[0]
    for frag in fragments[1:]:
        try:
            out += _hextochr[frag[:2]] + frag[2:]
        except KeyError:
            # not a valid escape: keep the '%' literally
            out += '%' + frag
        except UnicodeDecodeError:
            out += unichr(int(frag[:2], 16)) + frag[2:]
    return out
2161 2160
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(r'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>
    """

    # characters left unescaped by __str__ in userinfo / path components
    _safechars = "!~*'()+"
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                # no scheme was recognized: treat as a plain local path
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))

    def __repr__(self):
        # show only the components that were actually parsed
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                # bracketed IPv6 literal: quote() would mangle the brackets
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        # Return (url-without-credentials, auth-tuple-or-None).
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        # Whether this URL cannot be joined onto a base path.
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        # Return a filesystem path for local URLs, the original string
        # otherwise.
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
2448 2447
def hasscheme(path):
    """True if path parses with a URL scheme (e.g. 'http://...')."""
    return bool(url(path).scheme)
2451 2450
def hasdriveletter(path):
    """Report whether path starts with a Windows drive letter ('c:...')."""
    if not path:
        # preserve falsy inputs ('' or None) unchanged, as callers rely
        # only on truthiness
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
2454 2453
def urllocalpath(path):
    """Return path's local filesystem form via url.localpath().

    Query/fragment parsing is disabled so '?' and '#' stay in the path.
    """
    return url(path, parsequery=False, parsefragment=False).localpath()
2457 2456
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape stays recognizable
        parsed.passwd = '***'
    return str(parsed)
2464 2463
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return str(parsed)
2470 2469
def isatty(fp):
    """Return fp.isatty(), or False when fp has no isatty()."""
    try:
        result = fp.isatty()
    except AttributeError:
        return False
    return result
2476 2475
# Render a duration given in seconds with a readable unit.  The first
# entry whose threshold is met wins, so larger units come first.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2492 2491
# Current indentation depth (in spaces) for nested @timed reports; kept in
# a one-element list so wrappers can mutate it in place.
_timenesting = [0]
2494 2493
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        start = time.time()
        indent = 2
        # bump the shared nesting level so nested @timed calls indent
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            # report even when func raises
            elapsed = time.time() - start
            _timenesting[0] -= indent
            sys.stderr.write('%s%s: %s\n' %
                             (' ' * _timenesting[0], func.__name__,
                              timecount(elapsed)))
    return wrapper
2519 2518
# Suffix -> byte-multiplier table for sizetoint().  Order matters: plain
# 'b' must come last so it does not shadow 'kb'/'mb'/'gb' in the
# endswith() scan.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2522 2521
def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        # try each known suffix; fall through to a plain integer
        for suffix, scale in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * scale)
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2541 2540
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort by source name so the call order is deterministic
        self._hooks.sort(key=lambda entry: entry[0])
        return [hook(*args) for source, hook in self._hooks]
2559 2558
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    # [:-skip - 1] drops this frame plus the 'skip' innermost callers
    entries = [(fileline % (fn, ln), func)
               for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
    if entries:
        # pad every location to the widest one so the columns line up
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)
2581 2580
def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
    '''Write a message and a nicely formatted stacktrace to f (stderr).

    Skips the 'skip' last entries. By default it will flush stdout first
    so buffered output appears in order with the trace.
    It can be used everywhere and intentionally does not require an ui
    object. Not be used in production code but very convenient while
    developing.
    '''
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg)
    for frameline in getstackframes(skip + 1):
        f.write(frameline)
    f.flush()
2594 2593
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> number of files beneath it
        self._dirs = {}
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style map: skip entries whose state matches 'skip'
            for f, s in map.iteritems():
                if s[0] != skip:
                    self.addpath(f)
        else:
            for f in map:
                self.addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # every ancestor is already counted once this one is
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
2630 2629
# Prefer the C implementation of the dirs multiset when the parsers
# extension module provides one.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
2633 2632
def finddirs(path):
    """Yield every ancestor directory of path, deepest first."""
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
2639 2638
2640 2639 # compression utility
2641 2640
class nocompress(object):
    """Pass-through 'compressor' used when no compression is requested."""

    def compress(self, x):
        # identity: data goes through untouched
        return x

    def flush(self):
        # nothing buffered, so nothing to emit
        return ""
2647 2646
# Compression engines keyed by their two-byte header tag; values are
# zero-argument factories returning a compressor object.
compressors = {
    None: nocompress,
    # lambda to prevent early import
    'BZ': lambda: bz2.BZ2Compressor(),
    'GZ': lambda: zlib.compressobj(),
    }
# also support the old form by courtesies
compressors['UN'] = compressors[None]
2656 2655
def _makedecompressor(decompcls):
    # Build a 'decompressors' entry from a decompressor-object factory.
    def generator(f):
        d = decompcls()
        for chunk in filechunkiter(f):
            yield d.decompress(chunk)
    def func(fh):
        # chunkbuffer gives callers a file-like read() over the generator
        return chunkbuffer(generator(fh))
    return func
2665 2664
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor. Returns the list of their __enter__
        results.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.

        Returns True (suppressing the exception) only when an exception
        was received and some exit function suppressed it.
        '''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    # this handler swallowed the exception: later handlers
                    # see a clean state
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # an exit function itself failed: remember the most recent
                # failure and pass it on to the remaining handlers.
                # (The previous redundant 'pending = sys.exc_info()' dead
                # store before this line has been removed.)
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
2725 2724
def _bz2():
    # Decompressor for bzip2 streams whose leading 'BZ' magic was consumed
    # by the caller as a format tag; feed the magic back in first.
    d = bz2.BZ2Decompressor()
    # Bzip2 stream start with BZ, but we stripped it.
    # we put it back for good measure.
    d.decompress('BZ')
    return d
2732 2731
# Decompressor factories keyed by header tag.  '_truncatedBZ' handles
# bzip2 streams whose leading 'BZ' magic has been stripped (see _bz2).
decompressors = {None: lambda fh: fh,
                 '_truncatedBZ': _makedecompressor(_bz2),
                 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
                 }
# also support the old form by courtesies
decompressors['UN'] = decompressors[None]
2740 2739
# convenient shortcut for debugging: util.dst('here')
dst = debugstacktrace
@@ -1,679 +1,692
1 1 commit date test
2 2
3 3 $ hg init test
4 4 $ cd test
5 5 $ echo foo > foo
6 6 $ hg add foo
7 7 $ cat > $TESTTMP/checkeditform.sh <<EOF
8 8 > env | grep HGEDITFORM
9 9 > true
10 10 > EOF
11 11 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
12 12 HGEDITFORM=commit.normal.normal
13 13 abort: empty commit message
14 14 [255]
15 15 $ hg commit -d '0 0' -m commit-1
16 16 $ echo foo >> foo
17 17 $ hg commit -d '1 4444444' -m commit-3
18 18 abort: impossible time zone offset: 4444444
19 19 [255]
20 20 $ hg commit -d '1 15.1' -m commit-4
21 21 abort: invalid date: '1\t15.1'
22 22 [255]
23 23 $ hg commit -d 'foo bar' -m commit-5
24 24 abort: invalid date: 'foo bar'
25 25 [255]
26 26 $ hg commit -d ' 1 4444' -m commit-6
27 27 $ hg commit -d '111111111111 0' -m commit-7
28 28 abort: date exceeds 32 bits: 111111111111
29 29 [255]
30 $ hg commit -d '-7654321 3600' -m commit-7
31 abort: negative date value: -7654321
30 $ hg commit -d '-111111111111 0' -m commit-7
31 abort: date exceeds 32 bits: -111111111111
32 [255]
33 $ echo foo >> foo
34 $ hg commit -d '1901-12-13 20:45:53 +0000' -m commit-7-2
35 $ echo foo >> foo
36 $ hg commit -d '-2147483647 0' -m commit-7-3
37 $ hg log -T '{rev} {date|isodatesec}\n' -l2
38 3 1901-12-13 20:45:53 +0000
39 2 1901-12-13 20:45:53 +0000
40 $ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7
41 abort: date exceeds 32 bits: -2147483648
42 [255]
43 $ hg commit -d '-2147483648 0' -m commit-7
44 abort: date exceeds 32 bits: -2147483648
32 45 [255]
33 46
34 47 commit added file that has been deleted
35 48
36 49 $ echo bar > bar
37 50 $ hg add bar
38 51 $ rm bar
39 52 $ hg commit -m commit-8
40 53 nothing changed (1 missing files, see 'hg status')
41 54 [1]
42 55 $ hg commit -m commit-8-2 bar
43 56 abort: bar: file not found!
44 57 [255]
45 58
46 59 $ hg -q revert -a --no-backup
47 60
48 61 $ mkdir dir
49 62 $ echo boo > dir/file
50 63 $ hg add
51 64 adding dir/file (glob)
52 65 $ hg -v commit -m commit-9 dir
53 66 committing files:
54 67 dir/file
55 68 committing manifest
56 69 committing changelog
57 committed changeset 2:d2a76177cb42
70 committed changeset 4:76aab26859d7
58 71
59 72 $ echo > dir.file
60 73 $ hg add
61 74 adding dir.file
62 75 $ hg commit -m commit-10 dir dir.file
63 76 abort: dir: no match under directory!
64 77 [255]
65 78
66 79 $ echo >> dir/file
67 80 $ mkdir bleh
68 81 $ mkdir dir2
69 82 $ cd bleh
70 83 $ hg commit -m commit-11 .
71 84 abort: bleh: no match under directory!
72 85 [255]
73 86 $ hg commit -m commit-12 ../dir ../dir2
74 87 abort: dir2: no match under directory!
75 88 [255]
76 89 $ hg -v commit -m commit-13 ../dir
77 90 committing files:
78 91 dir/file
79 92 committing manifest
80 93 committing changelog
81 committed changeset 3:1cd62a2d8db5
94 committed changeset 5:9a50557f1baf
82 95 $ cd ..
83 96
84 97 $ hg commit -m commit-14 does-not-exist
85 98 abort: does-not-exist: * (glob)
86 99 [255]
87 100
88 101 #if symlink
89 102 $ ln -s foo baz
90 103 $ hg commit -m commit-15 baz
91 104 abort: baz: file not tracked!
92 105 [255]
93 106 #endif
94 107
95 108 $ touch quux
96 109 $ hg commit -m commit-16 quux
97 110 abort: quux: file not tracked!
98 111 [255]
99 112 $ echo >> dir/file
100 113 $ hg -v commit -m commit-17 dir/file
101 114 committing files:
102 115 dir/file
103 116 committing manifest
104 117 committing changelog
105 committed changeset 4:49176991390e
118 committed changeset 6:4b4c75bf422d
106 119
107 120 An empty date was interpreted as epoch origin
108 121
109 122 $ echo foo >> foo
110 123 $ hg commit -d '' -m commit-no-date
111 124 $ hg tip --template '{date|isodate}\n' | grep '1970'
112 125 [1]
113 126
114 127 Make sure we do not obscure unknown requires file entries (issue2649)
115 128
116 129 $ echo foo >> foo
117 130 $ echo fake >> .hg/requires
118 131 $ hg commit -m bla
119 132 abort: repository requires features unknown to this Mercurial: fake!
120 133 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
121 134 [255]
122 135
123 136 $ cd ..
124 137
125 138
126 139 partial subdir commit test
127 140
128 141 $ hg init test2
129 142 $ cd test2
130 143 $ mkdir foo
131 144 $ echo foo > foo/foo
132 145 $ mkdir bar
133 146 $ echo bar > bar/bar
134 147 $ hg add
135 148 adding bar/bar (glob)
136 149 adding foo/foo (glob)
137 150 $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
138 151 commit-subdir-1
139 152
140 153
141 154 HG: Enter commit message. Lines beginning with 'HG:' are removed.
142 155 HG: Leave message empty to abort commit.
143 156 HG: --
144 157 HG: user: test
145 158 HG: branch 'default'
146 159 HG: added foo/foo
147 160
148 161
149 162 $ hg ci -m commit-subdir-2 bar
150 163
151 164 subdir log 1
152 165
153 166 $ hg log -v foo
154 167 changeset: 0:f97e73a25882
155 168 user: test
156 169 date: Thu Jan 01 00:00:00 1970 +0000
157 170 files: foo/foo
158 171 description:
159 172 commit-subdir-1
160 173
161 174
162 175
163 176 subdir log 2
164 177
165 178 $ hg log -v bar
166 179 changeset: 1:aa809156d50d
167 180 tag: tip
168 181 user: test
169 182 date: Thu Jan 01 00:00:00 1970 +0000
170 183 files: bar/bar
171 184 description:
172 185 commit-subdir-2
173 186
174 187
175 188
176 189 full log
177 190
178 191 $ hg log -v
179 192 changeset: 1:aa809156d50d
180 193 tag: tip
181 194 user: test
182 195 date: Thu Jan 01 00:00:00 1970 +0000
183 196 files: bar/bar
184 197 description:
185 198 commit-subdir-2
186 199
187 200
188 201 changeset: 0:f97e73a25882
189 202 user: test
190 203 date: Thu Jan 01 00:00:00 1970 +0000
191 204 files: foo/foo
192 205 description:
193 206 commit-subdir-1
194 207
195 208
196 209 $ cd ..
197 210
198 211
199 212 dot and subdir commit test
200 213
201 214 $ hg init test3
202 215 $ echo commit-foo-subdir > commit-log-test
203 216 $ cd test3
204 217 $ mkdir foo
205 218 $ echo foo content > foo/plain-file
206 219 $ hg add foo/plain-file
207 220 $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo
208 221 commit-foo-subdir
209 222
210 223
211 224 HG: Enter commit message. Lines beginning with 'HG:' are removed.
212 225 HG: Leave message empty to abort commit.
213 226 HG: --
214 227 HG: user: test
215 228 HG: branch 'default'
216 229 HG: added foo/plain-file
217 230
218 231
219 232 $ echo modified foo content > foo/plain-file
220 233 $ hg ci -m commit-foo-dot .
221 234
222 235 full log
223 236
224 237 $ hg log -v
225 238 changeset: 1:95b38e3a5b2e
226 239 tag: tip
227 240 user: test
228 241 date: Thu Jan 01 00:00:00 1970 +0000
229 242 files: foo/plain-file
230 243 description:
231 244 commit-foo-dot
232 245
233 246
234 247 changeset: 0:65d4e9386227
235 248 user: test
236 249 date: Thu Jan 01 00:00:00 1970 +0000
237 250 files: foo/plain-file
238 251 description:
239 252 commit-foo-subdir
240 253
241 254
242 255
243 256 subdir log
244 257
245 258 $ cd foo
246 259 $ hg log .
247 260 changeset: 1:95b38e3a5b2e
248 261 tag: tip
249 262 user: test
250 263 date: Thu Jan 01 00:00:00 1970 +0000
251 264 summary: commit-foo-dot
252 265
253 266 changeset: 0:65d4e9386227
254 267 user: test
255 268 date: Thu Jan 01 00:00:00 1970 +0000
256 269 summary: commit-foo-subdir
257 270
258 271 $ cd ..
259 272 $ cd ..
260 273
261 274 Issue1049: Hg permits partial commit of merge without warning
262 275
263 276 $ hg init issue1049
264 277 $ cd issue1049
265 278 $ echo a > a
266 279 $ hg ci -Ama
267 280 adding a
268 281 $ echo a >> a
269 282 $ hg ci -mb
270 283 $ hg up 0
271 284 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
272 285 $ echo b >> a
273 286 $ hg ci -mc
274 287 created new head
275 288 $ HGMERGE=true hg merge
276 289 merging a
277 290 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
278 291 (branch merge, don't forget to commit)
279 292
280 293 should fail because we are specifying a file name
281 294
282 295 $ hg ci -mmerge a
283 296 abort: cannot partially commit a merge (do not specify files or patterns)
284 297 [255]
285 298
286 299 should fail because we are specifying a pattern
287 300
288 301 $ hg ci -mmerge -I a
289 302 abort: cannot partially commit a merge (do not specify files or patterns)
290 303 [255]
291 304
292 305 should succeed
293 306
294 307 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit
295 308 HGEDITFORM=commit.normal.merge
296 309 $ cd ..
297 310
298 311
299 312 test commit message content
300 313
301 314 $ hg init commitmsg
302 315 $ cd commitmsg
303 316 $ echo changed > changed
304 317 $ echo removed > removed
305 318 $ hg book activebookmark
306 319 $ hg ci -qAm init
307 320
308 321 $ hg rm removed
309 322 $ echo changed >> changed
310 323 $ echo added > added
311 324 $ hg add added
312 325 $ HGEDITOR=cat hg ci -A
313 326
314 327
315 328 HG: Enter commit message. Lines beginning with 'HG:' are removed.
316 329 HG: Leave message empty to abort commit.
317 330 HG: --
318 331 HG: user: test
319 332 HG: branch 'default'
320 333 HG: bookmark 'activebookmark'
321 334 HG: added added
322 335 HG: changed changed
323 336 HG: removed removed
324 337 abort: empty commit message
325 338 [255]
326 339
327 340 test saving last-message.txt
328 341
329 342 $ hg init sub
330 343 $ echo a > sub/a
331 344 $ hg -R sub add sub/a
332 345 $ cat > sub/.hg/hgrc <<EOF
333 346 > [hooks]
334 347 > precommit.test-saving-last-message = false
335 348 > EOF
336 349
337 350 $ echo 'sub = sub' > .hgsub
338 351 $ hg add .hgsub
339 352
340 353 $ cat > $TESTTMP/editor.sh <<EOF
341 354 > echo "==== before editing:"
342 355 > cat \$1
343 356 > echo "===="
344 357 > echo "test saving last-message.txt" >> \$1
345 358 > EOF
346 359
347 360 $ rm -f .hg/last-message.txt
348 361 $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q
349 362 ==== before editing:
350 363
351 364
352 365 HG: Enter commit message. Lines beginning with 'HG:' are removed.
353 366 HG: Leave message empty to abort commit.
354 367 HG: --
355 368 HG: user: test
356 369 HG: branch 'default'
357 370 HG: bookmark 'activebookmark'
358 371 HG: subrepo sub
359 372 HG: added .hgsub
360 373 HG: added added
361 374 HG: changed .hgsubstate
362 375 HG: changed changed
363 376 HG: removed removed
364 377 ====
365 378 abort: precommit.test-saving-last-message hook exited with status 1 (in subrepo sub)
366 379 [255]
367 380 $ cat .hg/last-message.txt
368 381
369 382
370 383 test saving last-message.txt
371 384
372 385 test that '[committemplate] changeset' definition and commit log
373 386 specific template keywords work well
374 387
375 388 $ cat >> .hg/hgrc <<EOF
376 389 > [committemplate]
377 390 > changeset.commit.normal = HG: this is "commit.normal" template
378 391 > HG: {extramsg}
379 392 > {if(activebookmark,
380 393 > "HG: bookmark '{activebookmark}' is activated\n",
381 394 > "HG: no bookmark is activated\n")}{subrepos %
382 395 > "HG: subrepo '{subrepo}' is changed\n"}
383 396 >
384 397 > changeset.commit = HG: this is "commit" template
385 398 > HG: {extramsg}
386 399 > {if(activebookmark,
387 400 > "HG: bookmark '{activebookmark}' is activated\n",
388 401 > "HG: no bookmark is activated\n")}{subrepos %
389 402 > "HG: subrepo '{subrepo}' is changed\n"}
390 403 >
391 404 > changeset = HG: this is customized commit template
392 405 > HG: {extramsg}
393 406 > {if(activebookmark,
394 407 > "HG: bookmark '{activebookmark}' is activated\n",
395 408 > "HG: no bookmark is activated\n")}{subrepos %
396 409 > "HG: subrepo '{subrepo}' is changed\n"}
397 410 > EOF
398 411
399 412 $ hg init sub2
400 413 $ echo a > sub2/a
401 414 $ hg -R sub2 add sub2/a
402 415 $ echo 'sub2 = sub2' >> .hgsub
403 416
404 417 $ HGEDITOR=cat hg commit -S -q
405 418 HG: this is "commit.normal" template
406 419 HG: Leave message empty to abort commit.
407 420 HG: bookmark 'activebookmark' is activated
408 421 HG: subrepo 'sub' is changed
409 422 HG: subrepo 'sub2' is changed
410 423 abort: empty commit message
411 424 [255]
412 425
413 426 $ cat >> .hg/hgrc <<EOF
414 427 > [committemplate]
415 428 > changeset.commit.normal =
416 429 > # now, "changeset.commit" should be chosen for "hg commit"
417 430 > EOF
418 431
419 432 $ hg bookmark --inactive activebookmark
420 433 $ hg forget .hgsub
421 434 $ HGEDITOR=cat hg commit -q
422 435 HG: this is "commit" template
423 436 HG: Leave message empty to abort commit.
424 437 HG: no bookmark is activated
425 438 abort: empty commit message
426 439 [255]
427 440
428 441 $ cat >> .hg/hgrc <<EOF
429 442 > [committemplate]
430 443 > changeset.commit =
431 444 > # now, "changeset" should be chosen for "hg commit"
432 445 > EOF
433 446
434 447 $ HGEDITOR=cat hg commit -q
435 448 HG: this is customized commit template
436 449 HG: Leave message empty to abort commit.
437 450 HG: no bookmark is activated
438 451 abort: empty commit message
439 452 [255]
440 453
441 454 $ cat >> .hg/hgrc <<EOF
442 455 > [committemplate]
443 456 > changeset = {desc}
444 457 > HG: mods={file_mods}
445 458 > HG: adds={file_adds}
446 459 > HG: dels={file_dels}
447 460 > HG: files={files}
448 461 > HG:
449 462 > {splitlines(diff()) % 'HG: {line}\n'
450 463 > }HG:
451 464 > HG: mods={file_mods}
452 465 > HG: adds={file_adds}
453 466 > HG: dels={file_dels}
454 467 > HG: files={files}\n
455 468 > EOF
456 469 $ hg status -amr
457 470 M changed
458 471 A added
459 472 R removed
460 473 $ HGEDITOR=cat hg commit -q -e -m "foo bar" changed
461 474 foo bar
462 475 HG: mods=changed
463 476 HG: adds=
464 477 HG: dels=
465 478 HG: files=changed
466 479 HG:
467 480 HG: --- a/changed Thu Jan 01 00:00:00 1970 +0000
468 481 HG: +++ b/changed Thu Jan 01 00:00:00 1970 +0000
469 482 HG: @@ -1,1 +1,2 @@
470 483 HG: changed
471 484 HG: +changed
472 485 HG:
473 486 HG: mods=changed
474 487 HG: adds=
475 488 HG: dels=
476 489 HG: files=changed
477 490 $ hg status -amr
478 491 A added
479 492 R removed
480 493 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
481 494 M changed
482 495 A
483 496 R
484 497 $ hg rollback -q
485 498
486 499 $ cat >> .hg/hgrc <<EOF
487 500 > [committemplate]
488 501 > changeset = {desc}
489 502 > HG: mods={file_mods}
490 503 > HG: adds={file_adds}
491 504 > HG: dels={file_dels}
492 505 > HG: files={files}
493 506 > HG:
494 507 > {splitlines(diff("changed")) % 'HG: {line}\n'
495 508 > }HG:
496 509 > HG: mods={file_mods}
497 510 > HG: adds={file_adds}
498 511 > HG: dels={file_dels}
499 512 > HG: files={files}
500 513 > HG:
501 514 > {splitlines(diff("added")) % 'HG: {line}\n'
502 515 > }HG:
503 516 > HG: mods={file_mods}
504 517 > HG: adds={file_adds}
505 518 > HG: dels={file_dels}
506 519 > HG: files={files}
507 520 > HG:
508 521 > {splitlines(diff("removed")) % 'HG: {line}\n'
509 522 > }HG:
510 523 > HG: mods={file_mods}
511 524 > HG: adds={file_adds}
512 525 > HG: dels={file_dels}
513 526 > HG: files={files}\n
514 527 > EOF
515 528 $ HGEDITOR=cat hg commit -q -e -m "foo bar" added removed
516 529 foo bar
517 530 HG: mods=
518 531 HG: adds=added
519 532 HG: dels=removed
520 533 HG: files=added removed
521 534 HG:
522 535 HG:
523 536 HG: mods=
524 537 HG: adds=added
525 538 HG: dels=removed
526 539 HG: files=added removed
527 540 HG:
528 541 HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
529 542 HG: +++ b/added Thu Jan 01 00:00:00 1970 +0000
530 543 HG: @@ -0,0 +1,1 @@
531 544 HG: +added
532 545 HG:
533 546 HG: mods=
534 547 HG: adds=added
535 548 HG: dels=removed
536 549 HG: files=added removed
537 550 HG:
538 551 HG: --- a/removed Thu Jan 01 00:00:00 1970 +0000
539 552 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
540 553 HG: @@ -1,1 +0,0 @@
541 554 HG: -removed
542 555 HG:
543 556 HG: mods=
544 557 HG: adds=added
545 558 HG: dels=removed
546 559 HG: files=added removed
547 560 $ hg status -amr
548 561 M changed
549 562 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
550 563 M
551 564 A added
552 565 R removed
553 566 $ hg rollback -q
554 567
555 568 $ cat >> .hg/hgrc <<EOF
556 569 > # disable customizing for subsequent tests
557 570 > [committemplate]
558 571 > changeset =
559 572 > EOF
560 573
561 574 $ cd ..
562 575
563 576
564 577 commit copy
565 578
566 579 $ hg init dir2
567 580 $ cd dir2
568 581 $ echo bleh > bar
569 582 $ hg add bar
570 583 $ hg ci -m 'add bar'
571 584
572 585 $ hg cp bar foo
573 586 $ echo >> bar
574 587 $ hg ci -m 'cp bar foo; change bar'
575 588
576 589 $ hg debugrename foo
577 590 foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
578 591 $ hg debugindex bar
579 592 rev offset length ..... linkrev nodeid p1 p2 (re)
580 593 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re)
581 594 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re)
582 595
583 596 Test making empty commits
584 597 $ hg commit --config ui.allowemptycommit=True -m "empty commit"
585 598 $ hg log -r . -v --stat
586 599 changeset: 2:d809f3644287
587 600 tag: tip
588 601 user: test
589 602 date: Thu Jan 01 00:00:00 1970 +0000
590 603 description:
591 604 empty commit
592 605
593 606
594 607
595 608 verify pathauditor blocks evil filepaths
596 609 $ cat > evil-commit.py <<EOF
597 610 > from mercurial import ui, hg, context, node
598 611 > notrc = u".h\u200cg".encode('utf-8') + '/hgrc'
599 612 > u = ui.ui()
600 613 > r = hg.repository(u, '.')
601 614 > def filectxfn(repo, memctx, path):
602 615 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
603 616 > c = context.memctx(r, [r['tip'].node(), node.nullid],
604 617 > 'evil', [notrc], filectxfn, 0)
605 618 > r.commitctx(c)
606 619 > EOF
607 620 $ $PYTHON evil-commit.py
608 621 #if windows
609 622 $ hg co --clean tip
610 623 abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc)
611 624 [255]
612 625 #else
613 626 $ hg co --clean tip
614 627 abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
615 628 [255]
616 629 #endif
617 630
618 631 $ hg rollback -f
619 632 repository tip rolled back to revision 2 (undo commit)
620 633 $ cat > evil-commit.py <<EOF
621 634 > from mercurial import ui, hg, context, node
622 635 > notrc = "HG~1/hgrc"
623 636 > u = ui.ui()
624 637 > r = hg.repository(u, '.')
625 638 > def filectxfn(repo, memctx, path):
626 639 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
627 640 > c = context.memctx(r, [r['tip'].node(), node.nullid],
628 641 > 'evil', [notrc], filectxfn, 0)
629 642 > r.commitctx(c)
630 643 > EOF
631 644 $ $PYTHON evil-commit.py
632 645 $ hg co --clean tip
633 646 abort: path contains illegal component: HG~1/hgrc (glob)
634 647 [255]
635 648
636 649 $ hg rollback -f
637 650 repository tip rolled back to revision 2 (undo commit)
638 651 $ cat > evil-commit.py <<EOF
639 652 > from mercurial import ui, hg, context, node
640 653 > notrc = "HG8B6C~2/hgrc"
641 654 > u = ui.ui()
642 655 > r = hg.repository(u, '.')
643 656 > def filectxfn(repo, memctx, path):
644 657 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
645 658 > c = context.memctx(r, [r['tip'].node(), node.nullid],
646 659 > 'evil', [notrc], filectxfn, 0)
647 660 > r.commitctx(c)
648 661 > EOF
649 662 $ $PYTHON evil-commit.py
650 663 $ hg co --clean tip
651 664 abort: path contains illegal component: HG8B6C~2/hgrc (glob)
652 665 [255]
653 666
654 667 # test that an unmodified commit template message aborts
655 668
656 669 $ hg init unmodified_commit_template
657 670 $ cd unmodified_commit_template
658 671 $ echo foo > foo
659 672 $ hg add foo
660 673 $ hg commit -m "foo"
661 674 $ cat >> .hg/hgrc <<EOF
662 675 > [committemplate]
663 676 > changeset.commit = HI THIS IS NOT STRIPPED
664 677 > HG: this is customized commit template
665 678 > HG: {extramsg}
666 679 > {if(activebookmark,
667 680 > "HG: bookmark '{activebookmark}' is activated\n",
668 681 > "HG: no bookmark is activated\n")}{subrepos %
669 682 > "HG: subrepo '{subrepo}' is changed\n"}
670 683 > EOF
671 684 $ cat > $TESTTMP/notouching.sh <<EOF
672 685 > true
673 686 > EOF
674 687 $ echo foo2 > foo2
675 688 $ hg add foo2
676 689 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit
677 690 abort: commit message unchanged
678 691 [255]
679 692 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now