# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from __future__ import absolute_import

import bz2
import calendar
import collections
import datetime
import errno
import gc
import hashlib
import imp
import os
import re as remod
import shutil
import signal
import socket
import subprocess
import sys
import tempfile
import textwrap
import time
import traceback
import urllib
import zlib

from . import (
    encoding,
    error,
    i18n,
    osutil,
    parsers,
)

if os.name == 'nt':
    from . import windows as platform
else:
    from . import posix as platform

md5 = hashlib.md5
sha1 = hashlib.sha1
sha512 = hashlib.sha512
_ = i18n._

cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
termwidth = platform.termwidth
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
unlinkpath = platform.unlinkpath
username = platform.username

# Python compatibility

_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)

def safehasattr(thing, attr):
    return getattr(thing, attr, _notset) is not _notset

DIGESTS = {
    'md5': md5,
    'sha1': sha1,
    'sha512': sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""
        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None

class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

        d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        content = self._fh.read(length)
        self._digester.update(content)
        self._got += len(content)
        return content

    def validate(self):
        if self._size != self._got:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for k, v in self._digests.items():
            if v != self._digester[k]:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (k, v, self._digester[k]))
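
# Illustrative usage sketch (not part of the original module): digestchecker
# wraps a file-like object and verifies size and digests once everything has
# been read. The size and digest below assume the handle yields 'foo'.
def _digestchecker_example(fh):
    wrapped = digestchecker(fh, 3, {'md5': digester(['md5'], 'foo')['md5']})
    while wrapped.read(4096):
        pass
    wrapped.validate()   # raises Abort on a size or digest mismatch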

try:
    buffer = buffer
except NameError:
    if sys.version_info[0] < 3:
        def buffer(sliceable, offset=0):
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0):
            return memoryview(sliceable)[offset:]

closefds = os.name == 'posix'

_chunksize = 4096

class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class lets us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input
        self._buffer = []
        self._eof = False
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True if any data is currently buffered

        This will be used externally as a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapses it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
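
# Illustrative usage sketch (not part of the original module): bufferedinputpipe
# is meant to wrap the read end of a pipe (for example a subprocess stdout) so
# readline() can be combined with polling. The echo command is a made-up
# stand-in for a real peer process.
def _bufferedinputpipe_example():
    _stdin, stdout = popen2('echo hello')
    pipe = bufferedinputpipe(stdout)
    if not pipe.hasbuffer:
        poll([pipe.fileno()])    # only poll when nothing is buffered yet
    return pipe.readline()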

def popen2(cmd, env=None, newlines=False):
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout

def popen3(cmd, env=None, newlines=False):
    stdin, stdout, stderr, p = popen4(cmd, env, newlines)
    return stdin, stdout, stderr

def popen4(cmd, env=None, newlines=False, bufsize=-1):
    p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout, p.stderr, p

def version():
    """Return version information if available."""
    try:
        from . import __version__
        return __version__.version
    except ImportError:
        return 'unknown'

def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)
    """
    if not v:
        v = version()
    parts = v.split('+', 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
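
# Illustrative usage sketch (not part of the original module): versiontuple()
# is convenient for gating features on the running Mercurial version.
def _versiongate_example():
    if versiontuple(n=2) < (3, 6):
        raise Abort(_('this example requires Mercurial 3.6 or later'))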

# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S',  #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
)

def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    if func.func_code.co_argcount == 0:
        cache = []
        def f():
            if len(cache) == 0:
                cache.append(func())
            return cache[0]
        return f
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
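
# Illustrative usage sketch (not part of the original module): cachefunc
# memoizes on the positional arguments, so a repeated call is served from the
# cache instead of re-running the wrapped function.
def _cachefunc_example():
    calls = []
    def square(x):
        calls.append(x)
        return x * x
    square = cachefunc(square)
    square(3)
    square(3)
    return len(calls) == 1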

class sortdict(dict):
    '''a simple sorted dictionary'''
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        if key in self:
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        dict.pop(self, key, *args, **kwargs)
        try:
            self._list.remove(key)
        except ValueError:
            pass
    def keys(self):
        return self._list
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
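
# Illustrative usage sketch (not part of the original module): sortdict keeps
# insertion order, and re-setting an existing key moves it to the end.
def _sortdict_example():
    d = sortdict([('a', 1), ('b', 2)])
    d['a'] = 3
    return d.keys() == ['b', 'a'] and d.items() == [('b', 2), ('a', 3)]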

class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    __slots__ = ('next', 'prev', 'key', 'value')

    def __init__(self):
        self.next = None
        self.prev = None
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset

class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
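
# Illustrative usage sketch (not part of the original module): lrucachedict
# evicts the least recently used entry once its capacity is exceeded. The
# capacity and keys below are made-up values.
def _lrucachedict_example():
    d = lrucachedict(2)
    d['a'] = 1
    d['b'] = 2
    d['a']        # touching 'a' makes it the newest entry
    d['c'] = 3    # evicts 'b', the least recently used key
    return 'b' not in d and 'a' in d and 'c' in d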

def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = collections.deque()
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f

class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
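
# Illustrative usage sketch (not part of the original module): propertycache
# computes an attribute on first access and stores the result in the instance
# __dict__, so later lookups bypass the descriptor entirely.
class _propertycacheexample(object):
    @propertycache
    def answer(self):
        return 42   # computed once, then cached on the instance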

def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)
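
# Illustrative usage sketch (not part of the original module): filter()
# dispatches on the 'tempfile:' and 'pipe:' prefixes; any other command is run
# as a shell pipe. 'tr' is just an example command.
def _filter_example():
    return filter('hello\n', 'pipe: tr a-z A-Z')   # -> 'HELLO\n' via pipefilter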

def binary(s):
    """return true if a string is binary data"""
    return bool(s and '\0' in s)

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

Abort = error.Abort
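
# Illustrative usage sketch (not part of the original module): increasingchunks()
# regroups a stream of small strings into progressively larger chunks, which
# reduces per-chunk overhead for large streams.
def _increasingchunks_example():
    pieces = ('x' * 100 for _i in range(100))
    return [len(c) for c in increasingchunks(pieces, min=256, max=1024)]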

def always(fn):
    return True

def never(fn):
    return False

def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7.
    """
    def wrapper(*args, **kwargs):
        gcenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            if gcenabled:
                gc.enable()
    return wrapper
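
# Illustrative usage sketch (not part of the original module): nogc is applied
# as a decorator around code that allocates many container objects at once.
@nogc
def _buildmap_example(pairs):
    # the garbage collector stays disabled while this dict is populated
    return dict(pairs)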

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'
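
# Illustrative usage sketch (not part of the original module): the paths below
# are made-up values; from the directory 'a/b' to the file 'a/c/y' the relative
# path is '../c/y' (joined with os.sep).
def _pathto_example():
    return pathto('/repo', os.sep.join(['a', 'b']), 'a/c/y')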

def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze

# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(sys.executable)
else:
    datapath = os.path.dirname(__file__)

i18n.setdatapath(datapath)

_hgexecutable = None

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(os.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            _sethgexecutable(mainmod.__file__)
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable

def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def _isstdout(f):
    fileno = getattr(f, 'fileno', None)
    return fileno and fileno() == sys.__stdout__.fileno()

def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status, else raise onerr
    object as exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    if environ is None:
        environ = {}
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or _isstdout(out):
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                out.write(line)
            proc.wait()
            rc = proc.returncode
        if sys.platform == 'OpenVMS' and rc & 1:
            rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        raise onerr(errmsg)
    return rc
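
# Illustrative usage sketch (not part of the original module): system() runs a
# shell command with $HG exported and the given environment overrides; with
# onerr set, a non-zero status raises instead of being returned. The command,
# variable, and prefix below are made-up values.
def _system_example():
    return system('hg id', environ={'EXAMPLE': 1}, onerr=Abort,
                  errprefix=_('example command failed'))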

def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise
    return check

def copyfile(src, dest, hardlink=False, copystat=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime'''
    if os.path.lexists(dest):
        unlink(dest)
    # hardlinks are problematic on CIFS, quietly ignore this flag
    # until we find a way to work around it cleanly (issue4546)
    if False and hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
        except shutil.Error as inst:
            raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    if hardlink:
        topic = _('linking')
    else:
        topic = _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
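
# Illustrative usage sketch (not part of the original module): copyfiles()
# copies (or hardlinks) a whole tree and reports progress through a callback.
# The ui object and directory arguments are assumptions for the example.
def _copyfiles_example(ui, src, dst):
    def prog(topic, pos):
        ui.progress(topic, pos, unit=_('files'))
    return copyfiles(src, dst, progress=prog)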

_winreservednames = '''con prn aux nul
    com1 com2 com3 com4 com5 com6 com7 com8 com9
    lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'

def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in n:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t

if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def checkcase(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    b2 = b.upper()
    if b == b2:
        b2 = b.lower()
        if b == b2:
            return True # no evidence against case sensitivity
    p2 = os.path.join(d, b2)
    try:
        s2 = os.lstat(p2)
        if s2 == s1:
            return False
        return True
    except OSError:
        return True

try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False

class _re(object):
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

re = _re()
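
# Illustrative usage sketch (not part of the original module): util.re.compile()
# is a drop-in replacement for remod.compile() that transparently uses re2 when
# it is available and the flags allow it.
def _recompile_example():
    pat = re.compile(r'[a-f0-9]{40}', remod.IGNORECASE)
    return bool(pat.match('A' * 40))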
Paul Moore
Add a new function, fspath...
r6676 _fspathcache = {}
def fspath(name, root):
'''Get name in the case stored in the filesystem
FUJIWARA Katsunori
icasefs: avoid path-absoluteness/existance check in util.fspath() for efficiency...
r15710 The name should be relative to root, and be normcase-ed for efficiency.
Note that this function is unnecessary, and should not be
Paul Moore
Add a new function, fspath...
r6676 called, for case-sensitive filesystems (simply because it's expensive).
FUJIWARA Katsunori
icasefs: avoid normcase()-ing in util.fspath() for efficiency...
r15670
FUJIWARA Katsunori
icasefs: avoid path-absoluteness/existance check in util.fspath() for efficiency...
r15710 The root should be normcase-ed, too.
Paul Moore
Add a new function, fspath...
r6676 '''
Siddharth Agarwal
util.fspath: use a dict rather than a linear scan for lookups...
r23097 def _makefspathcacheentry(dir):
return dict((normcase(n), n) for n in os.listdir(dir))
FUJIWARA Katsunori
icasefs: retry directory scan once for already invalidated cache...
r15709
Paul Moore
Add a new function, fspath...
r6676 seps = os.sep
if os.altsep:
seps = seps + os.altsep
# Protect backslashes. This gets silly very quickly.
seps.replace('\\','\\\\')
Siddharth Agarwal
util: rename 're' to 'remod'...
r21907 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
FUJIWARA Katsunori
icasefs: use util.normcase() instead of lower() or os.path.normcase in fspath...
r15669 dir = os.path.normpath(root)
Paul Moore
Add a new function, fspath...
r6676 result = []
for part, sep in pattern.findall(name):
if sep:
result.append(sep)
continue
FUJIWARA Katsunori
icasefs: follow standard cache look up pattern
r15719 if dir not in _fspathcache:
Siddharth Agarwal
util.fspath: use a dict rather than a linear scan for lookups...
r23097 _fspathcache[dir] = _makefspathcacheentry(dir)
FUJIWARA Katsunori
icasefs: follow standard cache look up pattern
r15719 contents = _fspathcache[dir]
Paul Moore
Add a new function, fspath...
r6676
Siddharth Agarwal
util.fspath: use a dict rather than a linear scan for lookups...
r23097 found = contents.get(part)
if not found:
# retry "once per directory" per "dirstate.walk" which
# may take place for each patch of "hg qpush", for example
_fspathcache[dir] = contents = _makefspathcacheentry(dir)
found = contents.get(part)
result.append(found or part)
dir = os.path.join(dir, part)
return ''.join(result)
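# Illustrative sketch (not part of the original module): on case-insensitive
# filesystems fspath() recovers the on-disk spelling of a path, consulting
# _fspathcache so each directory is listed at most once (plus one retry):
#
#     # both arguments must already be normcase-ed; 'root' is hypothetical
#     stored = fspath(normcase('Foo/Bar.TXT'), normcase(root))
#     # 'stored' now carries the spelling actually found on disk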
def checknlink(testfile):
'''check whether hardlink count reporting works properly'''
# testfile may be open, so we need a separate file for checking to
# work around issue2543 (or testfile may get lost on Samba shares)
f1 = testfile + ".hgtmp1"
if os.path.lexists(f1):
return False
try:
posixfile(f1, 'w').close()
except IOError:
return False
f2 = testfile + ".hgtmp2"
fd = None
try:
oslink(f1, f2)
# nlinks() may behave differently for files on Windows shares if
# the file is open.
fd = posixfile(f2)
return nlinks(f2) > 1
except OSError:
return False
finally:
if fd is not None:
fd.close()
for f in (f1, f2):
try:
os.unlink(f)
except OSError:
pass
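# Illustrative sketch (not part of the original module): callers probe
# hardlink-count reporting next to the destination before trusting nlinks(),
# falling back to plain copies when the check fails ('src'/'dst' are
# hypothetical paths):
#
#     if checknlink(dst):
#         oslink(src, dst)          # nlinks() can be relied upon here
#     else:
#         shutil.copyfile(src, dst)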
def endswithsep(path):
'''Check path ends with os.sep or os.altsep.'''
return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
def splitpath(path):
'''Split path by os.sep.
Note that this function does not use os.altsep because this is
an alternative of simple "xxx.split(os.sep)".
It is recommended to use os.path.normpath() before using this
function if needed.'''
return path.split(os.sep)
def gui():
'''Are we running in a GUI?'''
if sys.platform == 'darwin':
if 'SSH_CONNECTION' in os.environ:
# handle SSH access to a box where the user is logged in
return False
elif getattr(osutil, 'isgui', None):
# check if a CoreGraphics session is available
return osutil.isgui()
else:
# pure build; use a safe default
return True
else:
return os.name == "nt" or os.environ.get("DISPLAY")
def mktempcopy(name, emptyok=False, createmode=None):
"""Create a temporary file with the same contents from name
The permission bits are copied from the original file.
If the temporary file is going to be truncated immediately, you
can use emptyok=True as an optimization.
Returns the name of the temporary file.
"""
d, fn = os.path.split(name)
fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
os.close(fd)
# Temporary files are created with mode 0600, which is usually not
# what we want. If the original file already exists, just copy
# its mode. Otherwise, manually obey umask.
copymode(name, temp, createmode)
if emptyok:
return temp
try:
try:
ifp = posixfile(name, "rb")
except IOError as inst:
if inst.errno == errno.ENOENT:
return temp
if not getattr(inst, 'filename', None):
inst.filename = name
raise
ofp = posixfile(temp, "wb")
for chunk in filechunkiter(ifp):
ofp.write(chunk)
ifp.close()
ofp.close()
except: # re-raises
try: os.unlink(temp)
except OSError: pass
raise
return temp
class atomictempfile(object):
'''writable file object that atomically updates a file

All writes will go to a temporary copy of the original file. Call
close() when you are done writing, and atomictempfile will rename
the temporary copy to the original name, making the changes
visible. If the object is destroyed without being closed, all your
writes are discarded.
'''
def __init__(self, name, mode='w+b', createmode=None):
self.__name = name # permanent name
self._tempname = mktempcopy(name, emptyok=('w' in mode),
createmode=createmode)
self._fp = posixfile(self._tempname, mode)
# delegated methods
self.write = self._fp.write
self.seek = self._fp.seek
self.tell = self._fp.tell
self.fileno = self._fp.fileno

def close(self):
if not self._fp.closed:
self._fp.close()
rename(self._tempname, localpath(self.__name))

def discard(self):
if not self._fp.closed:
try:
os.unlink(self._tempname)
except OSError:
pass
self._fp.close()

def __del__(self):
if safehasattr(self, '_fp'): # constructor actually did something
self.discard()
def makedirs(name, mode=None, notindexed=False):
"""recursive directory creation with parent mode inheritance"""
try:
makedir(name, notindexed)
except OSError as err:
if err.errno == errno.EEXIST:
return
if err.errno != errno.ENOENT or not name:
raise
parent = os.path.dirname(os.path.abspath(name))
if parent == name:
raise
makedirs(parent, mode, notindexed)
makedir(name, notindexed)
if mode is not None:
os.chmod(name, mode)
def ensuredirs(name, mode=None, notindexed=False):
"""race-safe recursive directory creation
Newly created directories are marked as "not to be indexed by
the content indexing service", if ``notindexed`` is specified
for "write" mode access.
"""
if os.path.isdir(name):
return
parent = os.path.dirname(os.path.abspath(name))
if parent != name:
ensuredirs(parent, mode, notindexed)
try:
makedir(name, notindexed)
except OSError as err:
if err.errno == errno.EEXIST and os.path.isdir(name):
# someone else seems to have won a directory creation race
return
raise
if mode is not None:
os.chmod(name, mode)
def readfile(path):
with open(path, 'rb') as fp:
return fp.read()

def writefile(path, text):
with open(path, 'wb') as fp:
fp.write(text)
def appendfile(path, text):
with open(path, 'ab') as fp:
fp.write(text)
class chunkbuffer(object):
"""Allow arbitrary sized chunks of data to be efficiently read from an
iterator over chunks of arbitrary size."""
def __init__(self, in_iter):
"""in_iter is the iterator that's iterating over the input chunks."""
def splitbig(chunks):
for chunk in chunks:
if len(chunk) > 2**20:
pos = 0
while pos < len(chunk):
end = pos + 2 ** 18
yield chunk[pos:end]
pos = end
else:
yield chunk
self.iter = splitbig(in_iter)
self._queue = collections.deque()
self._chunkoffset = 0

def read(self, l=None):
"""Read L bytes of data from the iterator of chunks of data.
Returns less than L bytes if the iterator runs dry.
If size parameter is omitted, read everything"""
if l is None:
return ''.join(self.iter)
left = l
buf = []
queue = self._queue
while left > 0:
# refill the queue
if not queue:
target = 2**18
for chunk in self.iter:
queue.append(chunk)
target -= len(chunk)
if target <= 0:
break
if not queue:
break

# The easy way to do this would be to queue.popleft(), modify the
# chunk (if necessary), then queue.appendleft(). However, for cases
# where we read partial chunk content, this incurs 2 dequeue
# mutations and creates a new str for the remaining chunk in the
# queue. Our code below avoids this overhead.
chunk = queue[0]
chunkl = len(chunk)
offset = self._chunkoffset
# Use full chunk.
if offset == 0 and left >= chunkl:
left -= chunkl
queue.popleft()
buf.append(chunk)
# self._chunkoffset remains at 0.
continue
chunkremaining = chunkl - offset
# Use all of unconsumed part of chunk.
if left >= chunkremaining:
left -= chunkremaining
queue.popleft()
# offset == 0 is enabled by block above, so this won't merely
# copy via ``chunk[0:]``.
buf.append(chunk[offset:])
self._chunkoffset = 0
# Partial chunk needed.
else:
buf.append(chunk[offset:offset + left])
self._chunkoffset += left
left -= chunkremaining
return ''.join(buf)
def filechunkiter(f, size=65536, limit=None):
"""Create a generator that produces the data in the file size
(default 65536) bytes at a time, up to optional limit (default is
to read all data). Chunks may be less than size bytes if the
chunk is the last chunk in the file, or the file is a socket or
some other type of file that sometimes reads less data than is
requested."""
assert size >= 0
assert limit is None or limit >= 0
while True:
if limit is None:
nbytes = size
else:
nbytes = min(limit, size)
s = nbytes and f.read(nbytes)
if not s:
break
if limit:
limit -= len(s)
yield s
def makedate(timestamp=None):
'''Return a unix timestamp (or the current time) as a (unixtime,
offset) tuple based off the local timezone.'''
if timestamp is None:
timestamp = time.time()
if timestamp < 0:
hint = _("check your clock")
raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
delta = (datetime.datetime.utcfromtimestamp(timestamp) -
datetime.datetime.fromtimestamp(timestamp))
tz = delta.days * 86400 + delta.seconds
return timestamp, tz
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
"""represent a (unixtime, offset) tuple as a localized time.
unixtime is seconds since the epoch, and offset is the time zone's
number of seconds away from UTC. if timezone is false, do not
append time zone to string."""
t, tz = date or makedate()
if t < 0:
t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
tz = 0
if "%1" in format or "%2" in format or "%z" in format:
sign = (tz > 0) and "-" or "+"
minutes = abs(tz) // 60
q, r = divmod(minutes, 60)
format = format.replace("%z", "%1%2")
format = format.replace("%1", "%c%02d" % (sign, q))
format = format.replace("%2", "%02d" % r)
try:
t = time.gmtime(float(t) - tz)
except ValueError:
# time was out of range
t = time.gmtime(sys.maxint)
s = time.strftime(format, t)
return s

def shortdate(date=None):
"""turn (timestamp, tzoff) tuple into iso 8631 date."""
return datestr(date, format='%Y-%m-%d')

def parsetimezone(tz):
"""parse a timezone string and return an offset integer"""
if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
sign = (tz[0] == "+") and 1 or -1
hours = int(tz[1:3])
minutes = int(tz[3:5])
return -sign * (hours * 60 + minutes) * 60
if tz == "GMT" or tz == "UTC":
return 0
return None
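# Illustrative sketch (not part of the original module): the returned offset
# follows Mercurial's convention (seconds to add to local time to reach
# UTC), so zones east of UTC come back negative:
#
#     assert parsetimezone('+0200') == -7200
#     assert parsetimezone('-0500') == 18000
#     assert parsetimezone('GMT') == 0
#     assert parsetimezone('EST') is None    # unknown names are not parsed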
def strdate(string, format, defaults=[]):
"""parse a localized time string and return a (unixtime, offset) tuple.
if the string cannot be parsed, ValueError is raised."""
# NOTE: unixtime = localunixtime + offset
offset, date = parsetimezone(string.split()[-1]), string
if offset is not None:
date = " ".join(string.split()[:-1])
# add missing elements from defaults
usenow = False # default to using biased defaults
for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
found = [True for p in part if ("%"+p) in format]
if not found:
date += "@" + defaults[part][usenow]
format += "@%" + part[0]
else:
# We've found a specific time element, less specific time
# elements are relative to today
usenow = True
timetuple = time.strptime(date, format)
localunixtime = int(calendar.timegm(timetuple))
if offset is None:
# local timezone
unixtime = int(time.mktime(timetuple))
offset = unixtime - localunixtime
else:
unixtime = localunixtime + offset
return unixtime, offset

def parsedate(date, formats=None, bias=None):
"""parse a localized date/time and return a (unixtime, offset) tuple.
The date may be a "unixtime offset" string or in one of the specified
formats. If the date already is a (unixtime, offset) tuple, it is returned.
>>> parsedate(' today ') == parsedate(\
datetime.date.today().strftime('%b %d'))
True
>>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
datetime.timedelta(days=1)\
).strftime('%b %d'))
True
>>> now, tz = makedate()
>>> strnow, strtz = parsedate('now')
>>> (strnow - now) < 1
True
>>> tz == strtz
True
"""
if bias is None:
bias = {}
if not date:
return 0, 0
if isinstance(date, tuple) and len(date) == 2:
return date
if not formats:
formats = defaultdateformats
date = date.strip()

if date == 'now' or date == _('now'):
return makedate()
if date == 'today' or date == _('today'):
date = datetime.date.today().strftime('%b %d')
elif date == 'yesterday' or date == _('yesterday'):
date = (datetime.date.today() -
datetime.timedelta(days=1)).strftime('%b %d')
try:
when, offset = map(int, date.split(' '))
except ValueError:
# fill out defaults
now = makedate()
defaults = {}
for part in ("d", "mb", "yY", "HI", "M", "S"):
# this piece is for rounding the specific end of unknowns
b = bias.get(part)
if b is None:
if part[0] in "HMS":
b = "00"
else:
b = "0"
# this piece is for matching the generic end to today's date
n = datestr(now, "%" + part[0])
defaults[part] = (b, n)
for format in formats:
try:
when, offset = strdate(date, format, defaults)
except (ValueError, OverflowError):
pass
else:
break
else:
raise Abort(_('invalid date: %r') % date)
# validate explicit (probably user-specified) date and
# time zone offset. values must fit in signed 32 bits for
# current 32-bit linux runtimes. timezones go from UTC-12
# to UTC+14
if abs(when) > 0x7fffffff:
raise Abort(_('date exceeds 32 bits: %d') % when)
if when < 0:
raise Abort(_('negative date value: %d') % when)
if offset < -50400 or offset > 43200:
raise Abort(_('impossible time zone offset: %d') % offset)
return when, offset
def matchdate(date):
"""Return a function that matches a given date match specifier
Formats include:
'{date}' match a given date to the accuracy provided
'<{date}' on or before a given date
'>{date}' on or after a given date
>>> p1 = parsedate("10:29:59")
>>> p2 = parsedate("10:30:00")
>>> p3 = parsedate("10:30:59")
>>> p4 = parsedate("10:31:00")
>>> p5 = parsedate("Sep 15 10:30:00 1999")
>>> f = matchdate("10:30")
>>> f(p1[0])
False
>>> f(p2[0])
True
>>> f(p3[0])
True
>>> f(p4[0])
False
>>> f(p5[0])
False
"""
def lower(date):
d = {'mb': "1", 'd': "1"}
return parsedate(date, extendeddateformats, d)[0]
def upper(date):
d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
for days in ("31", "30", "29"):
try:
d["d"] = days
return parsedate(date, extendeddateformats, d)[0]
except Abort:
pass
d["d"] = "28"
return parsedate(date, extendeddateformats, d)[0]
date = date.strip()
if not date:
raise Abort(_("dates cannot consist entirely of whitespace"))
elif date[0] == "<":
if not date[1:]:
raise Abort(_("invalid day spec, use '<DATE'"))
when = upper(date[1:])
return lambda x: x <= when
elif date[0] == ">":
if not date[1:]:
raise Abort(_("invalid day spec, use '>DATE'"))
when = lower(date[1:])
return lambda x: x >= when
elif date[0] == "-":
try:
days = int(date[1:])
except ValueError:
raise Abort(_("invalid day spec: %s") % date[1:])
if days < 0:
raise Abort(_('%s must be nonnegative (see "hg help dates")')
% date[1:])
when = makedate()[0] - days * 3600 * 24
return lambda x: x >= when
elif " to " in date:
a, b = date.split(" to ")
start, stop = lower(a), upper(b)
return lambda x: x >= start and x <= stop
else:
start, stop = lower(date), upper(date)
return lambda x: x >= start and x <= stop
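# Illustrative sketch (not part of the original module): matchdate() compiles
# a date specifier into a predicate over unix timestamps, the same shape
# "hg log -d" uses; 'ctx' below stands for any changectx-like object:
#
#     inrange = matchdate('>2015-01-01')
#     if inrange(ctx.date()[0]):
#         pass    # changeset dated on or after Jan 1 2015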
def stringmatcher(pattern):
"""
accepts a string, possibly starting with 're:' or 'literal:' prefix.
returns the matcher name, pattern, and matcher function.
missing or unknown prefixes are treated as literal matches.
helper for tests:
>>> def test(pattern, *tests):
... kind, pattern, matcher = stringmatcher(pattern)
... return (kind, pattern, [bool(matcher(t)) for t in tests])
exact matching (no prefix):
>>> test('abcdefg', 'abc', 'def', 'abcdefg')
('literal', 'abcdefg', [False, False, True])
regex matching ('re:' prefix)
>>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
('re', 'a.+b', [False, False, True])
force exact matches ('literal:' prefix)
>>> test('literal:re:foobar', 'foobar', 're:foobar')
('literal', 're:foobar', [False, True])
unknown prefixes are ignored and treated as literals
>>> test('foo:bar', 'foo', 'bar', 'foo:bar')
('literal', 'foo:bar', [False, False, True])
"""
if pattern.startswith('re:'):
pattern = pattern[3:]
try:
regex = remod.compile(pattern)
except remod.error as e:
raise error.ParseError(_('invalid regular expression: %s')
% e)
return 're', pattern, regex.search
elif pattern.startswith('literal:'):
pattern = pattern[8:]
return 'literal', pattern, pattern.__eq__
def shortuser(user):
"""Return a short representation of a user name or email address."""
f = user.find('@')
if f >= 0:
user = user[:f]
f = user.find('<')
if f >= 0:
user = user[f + 1:]
f = user.find(' ')
if f >= 0:
user = user[:f]
f = user.find('.')
if f >= 0:
user = user[:f]
return user

def emailuser(user):
"""Return the user portion of an email address."""
f = user.find('@')
if f >= 0:
user = user[:f]
f = user.find('<')
if f >= 0:
user = user[f + 1:]
return user
def email(author):
'''get email of author.'''
r = author.find('>')
if r == -1:
r = None
return author[author.find('<') + 1:r]
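# Illustrative sketch (not part of the original module): the three helpers
# extract progressively shorter pieces of a "Name <addr>" author string:
#
#     author = 'Jane Q. Public <jane.public@example.com>'
#     assert email(author) == 'jane.public@example.com'
#     assert emailuser('jane.public@example.com') == 'jane.public'
#     assert shortuser('jane.public@example.com') == 'jane'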
def ellipsis(text, maxlength=400):
"""Trim string to at most maxlength (default: 400) columns in display."""
return encoding.trim(text, maxlength, ellipsis='...')
def unitcountfn(*unittable):
'''return a function that renders a readable count of some quantity'''
def go(count):
for multiplier, divisor, format in unittable:
if count >= divisor * multiplier:
return format % (count / float(divisor))
return unittable[-1][2] % count
return go
bytecount = unitcountfn(
(100, 1 << 30, _('%.0f GB')),
(10, 1 << 30, _('%.1f GB')),
(1, 1 << 30, _('%.2f GB')),
(100, 1 << 20, _('%.0f MB')),
(10, 1 << 20, _('%.1f MB')),
(1, 1 << 20, _('%.2f MB')),
(100, 1 << 10, _('%.0f KB')),
(10, 1 << 10, _('%.1f KB')),
(1, 1 << 10, _('%.2f KB')),
(1, 1, _('%.0f bytes')),
)
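# Illustrative sketch (not part of the original module): unitcountfn() picks
# the first (multiplier, divisor) row whose threshold the value reaches, and
# bytecount is simply the byte-sized instance; a hypothetical duration
# formatter would be built the same way:
#
#     assert bytecount(512) == '512 bytes'
#     assert bytecount(100 * (1 << 20)) == '100 MB'
#     timecount = unitcountfn(
#         (1, 3600, _('%.1f hours')),
#         (1, 60, _('%.1f minutes')),
#         (1, 1, _('%.0f seconds')),
#     )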
def uirepr(s):
# Avoid double backslash in Windows path repr()
return repr(s).replace('\\\\', '\\')
# delay import of textwrap
def MBTextWrapper(**kwargs):
class tw(textwrap.TextWrapper):
"""
Extend TextWrapper for width-awareness.
Neither number of 'bytes' in any encoding nor 'characters' is
appropriate to calculate terminal columns for specified string.
Original TextWrapper implementation uses built-in 'len()' directly,
so overriding is needed to use width information of each characters.
In addition, characters classified into 'ambiguous' width are
treated as wide in East Asian locales, but as narrow elsewhere.
This requires a decision by the user to determine the width of such
characters.
"""
def _cutdown(self, ucstr, space_left):
l = 0
colwidth = encoding.ucolwidth
for i in xrange(len(ucstr)):
l += colwidth(ucstr[i])
if space_left < l:
return (ucstr[:i], ucstr[i:])
return ucstr, ''
# overriding of base class
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
space_left = max(width - cur_len, 1)
if self.break_long_words:
cut, res = self._cutdown(reversed_chunks[-1], space_left)
cur_line.append(cut)
reversed_chunks[-1] = res
elif not cur_line:
cur_line.append(reversed_chunks.pop())
# this overriding code is imported from TextWrapper of Python 2.6
# to calculate columns of string by 'encoding.ucolwidth()'
def _wrap_chunks(self, chunks):
colwidth = encoding.ucolwidth
lines = []
if self.width <= 0:
raise ValueError("invalid width %r (must be > 0)" % self.width)
# Arrange in reverse order so items can be efficiently popped
# from a stack of chunks.
chunks.reverse()
while chunks:
# Start the list of chunks that will make up the current line.
# cur_len is just the length of all the chunks in cur_line.
cur_line = []
cur_len = 0
# Figure out which static string will prefix this line.
if lines:
indent = self.subsequent_indent
else:
indent = self.initial_indent
# Maximum width for this line.
width = self.width - len(indent)
# First chunk on line is whitespace -- drop it, unless this
# is the very beginning of the text (i.e. no lines started yet).
if self.drop_whitespace and chunks[-1].strip() == '' and lines:
del chunks[-1]
while chunks:
l = colwidth(chunks[-1])
# Can at least squeeze this chunk onto the current line.
if cur_len + l <= width:
cur_line.append(chunks.pop())
cur_len += l
# Nope, this line is full.
else:
break
# The current line is full, and the next chunk is too big to
# fit on *any* line (not just this one).
if chunks and colwidth(chunks[-1]) > width:
self._handle_long_word(chunks, cur_line, cur_len, width)
# If the last chunk on this line is all whitespace, drop it.
if (self.drop_whitespace and
cur_line and cur_line[-1].strip() == ''):
del cur_line[-1]
# Convert current line back to a string and store it in list
# of all lines (return value).
if cur_line:
lines.append(indent + ''.join(cur_line))
return lines
global MBTextWrapper
MBTextWrapper = tw
return tw(**kwargs)
def wrap(line, width, initindent='', hangindent=''):
maxindent = max(len(hangindent), len(initindent))
if width <= maxindent:
# adjust for weird terminal size
width = max(78, maxindent + 1)
line = line.decode(encoding.encoding, encoding.encodingmode)
initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
wrapper = MBTextWrapper(width=width,
initial_indent=initindent,
subsequent_indent=hangindent)
return wrapper.fill(line).encode(encoding.encoding)

def iterlines(iterator):
for chunk in iterator:
for line in chunk.splitlines():
yield line
def expandpath(path):
return os.path.expanduser(os.path.expandvars(path))
def hgcmd():
"""Return the command used to execute current hg
This is different from hgexecutable() because on Windows we want
to avoid things opening new shell windows like batch files, so we
get either the python call or current executable.
"""
if mainfrozen():
if getattr(sys, 'frozen', None) == 'macosx_app':
# Env variable set by py2app
return [os.environ['EXECUTABLEPATH']]
else:
return [sys.executable]
return gethgcmd()

def rundetached(args, condfn):
"""Execute the argument list in a detached process.

condfn is a callable which is called repeatedly and should return
True once the child process is known to have started successfully.
At this point, the child process PID is returned. If the child
process fails to start or finishes before condfn() evaluates to
True, return -1.
"""
# Windows case is easier because the child process is either
# successfully starting and validating the condition or exiting
# on failure. We just poll on its PID. On Unix, if the child
# process fails to start, it will be left in a zombie state until
# the parent wait on it, which we cannot do since we expect a long
# running process on success. Instead we listen for SIGCHLD telling
# us our child process terminated.
terminated = set()
def handler(signum, frame):
terminated.add(os.wait())
prevhandler = None
SIGCHLD = getattr(signal, 'SIGCHLD', None)
if SIGCHLD is not None:
prevhandler = signal.signal(SIGCHLD, handler)
try:
pid = spawndetached(args)
while not condfn():
if ((pid in terminated or not testpid(pid))
and not condfn()):
return -1
time.sleep(0.1)
return pid
finally:
if prevhandler is not None:
signal.signal(signal.SIGCHLD, prevhandler)
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
"""Return the result of interpolating items in the mapping into string s.
prefix is a single character string, or a two character string with
a backslash as the first character if the prefix needs to be escaped in
a regular expression.
fn is an optional function that will be applied to the replacement text
just before replacement.
escape_prefix is an optional flag that allows using doubled prefix for
its escaping.
"""
fn = fn or (lambda s: s)
patterns = '|'.join(mapping.keys())
if escape_prefix:
patterns += '|' + prefix
if len(prefix) > 1:
prefix_char = prefix[1:]
else:
prefix_char = prefix
mapping[prefix_char] = prefix_char
r = remod.compile(r'%s(%s)' % (prefix, patterns))
return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
def getport(port):
"""Return the port for a given network service.
If port is an integer, it's returned as is. If it's a string, it's
looked up using socket.getservbyname(). If there's no matching
service, error.Abort is raised.
"""
try:
return int(port)
except ValueError:
pass
try:
return socket.getservbyname(port)
except socket.error:
raise Abort(_("no port number associated with service '%s'") % port)
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
'0': False, 'no': False, 'false': False, 'off': False,
'never': False}
def parsebool(s):
"""Parse s into a boolean.
If s is not a valid boolean, returns None.
"""
return _booleans.get(s.lower(), None)
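# Illustrative sketch (not part of the original module): both helpers accept
# the loose spellings commonly found in config files and URLs:
#
#     assert parsebool('always') is True
#     assert parsebool('Off') is False
#     assert parsebool('maybe') is None
#     assert getport('8080') == 8080
#     assert getport('https') == 443   # via socket.getservbyname(), on a
#                                      # system with a standard services map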
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a + b, chr(int(a + b, 16)))
for a in _hexdig for b in _hexdig)
def _urlunquote(s):
"""Decode HTTP/HTML % encoding.
>>> _urlunquote('abc%20def')
'abc def'
"""
res = s.split('%')
# fastpath
if len(res) == 1:
return s
s = res[0]
for item in res[1:]:
try:
s += _hextochr[item[:2]] + item[2:]
except KeyError:
s += '%' + item
except UnicodeDecodeError:
s += unichr(int(item[:2], 16)) + item[2:]
return s
class url(object):
r"""Reliable URL parser.
This parses URLs and provides attributes for the following
components:
<scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
Missing components are set to None. The only exception is
fragment, which is set to '' if present but empty.
If parsefragment is False, fragment is included in query. If
parsequery is False, query is included in path. If both are
False, both fragment and query are included in path.
See http://www.ietf.org/rfc/rfc2396.txt for more information.
Note that for backward compatibility reasons, bundle URLs do not
take host names. That means 'bundle://../' has a path of '../'.
Examples:
>>> url('http://www.ietf.org/rfc/rfc2396.txt')
<url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
>>> url('ssh://[::1]:2200//home/joe/repo')
<url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
>>> url('file:///home/joe/repo')
<url scheme: 'file', path: '/home/joe/repo'>
>>> url('file:///c:/temp/foo/')
<url scheme: 'file', path: 'c:/temp/foo/'>
<url scheme: 'bundle', path: 'foo'>
>>> url('bundle://../foo')
<url scheme: 'bundle', path: '../foo'>
>>> url(r'c:\foo\bar')
<url path: 'c:\\foo\\bar'>
>>> url(r'\\blah\blah\blah')
<url path: '\\\\blah\\blah\\blah'>
>>> url(r'\\blah\blah\blah#baz')
<url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
>>> url(r'file:///C:\users\me')
<url scheme: 'file', path: 'C:\\users\\me'>
Authentication credentials:
>>> url('ssh://joe:xyz@x/repo')
<url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
>>> url('ssh://joe@x/repo')
<url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
Query strings and fragments:
>>> url('http://host/a?b#c')
<url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
>>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
<url scheme: 'http', host: 'host', path: 'a?b#c'>
"""
_safechars = "!~*'()+"
_safepchars = "/!~*'()+:\\"
_matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
def __init__(self, path, parsequery=True, parsefragment=True):
# We slowly chomp away at path until we have only the path left
self.scheme = self.user = self.passwd = self.host = None
self.port = self.path = self.query = self.fragment = None
self._localpath = True
self._hostport = ''
self._origpath = path
if parsefragment and '#' in path:
path, self.fragment = path.split('#', 1)
if not path:
path = None
# special case for Windows drive letters and UNC paths
if hasdriveletter(path) or path.startswith(r'\\'):
self.path = path
return
# For compatibility reasons, we can't handle bundle paths as
# normal URLS
if path.startswith('bundle:'):
self.scheme = 'bundle'
path = path[7:]
if path.startswith('//'):
path = path[2:]
self.path = path
return
if self._matchscheme(path):
parts = path.split(':', 1)
if parts[0]:
self.scheme, path = parts
self._localpath = False
if not path:
path = None
if self._localpath:
self.path = ''
return
else:
if self._localpath:
self.path = path
return
if parsequery and '?' in path:
path, self.query = path.split('?', 1)
if not path:
path = None
if not self.query:
self.query = None
# // is required to specify a host/authority
if path and path.startswith('//'):
parts = path[2:].split('/', 1)
if len(parts) > 1:
self.host, path = parts
else:
self.host = parts[0]
path = None
if not self.host:
self.host = None
# path of file:///d is /d
# path of file:///d:/ is d:/, not /d:/
if path and not hasdriveletter(path):
path = '/' + path
if self.host and '@' in self.host:
self.user, self.host = self.host.rsplit('@', 1)
if ':' in self.user:
self.user, self.passwd = self.user.split(':', 1)
if not self.host:
self.host = None
# Don't split on colons in IPv6 addresses without ports
if (self.host and ':' in self.host and
not (self.host.startswith('[') and self.host.endswith(']'))):
self._hostport = self.host
self.host, self.port = self.host.rsplit(':', 1)
if not self.host:
self.host = None
if (self.host and self.scheme == 'file' and
self.host not in ('localhost', '127.0.0.1', '[::1]')):
raise Abort(_('file:// URLs can only refer to localhost'))
self.path = path
# leave the query string escaped
for a in ('user', 'passwd', 'host', 'port',
'path', 'fragment'):
v = getattr(self, a)
if v is not None:
setattr(self, a, _urlunquote(v))
def __repr__(self):
attrs = []
for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
'query', 'fragment'):
v = getattr(self, a)
if v is not None:
attrs.append('%s: %r' % (a, v))
return '<url %s>' % ', '.join(attrs)
def __str__(self):
r"""Join the URL's components back into a URL string.
Examples:
>>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
>>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
'http://user:pw@host:80/?foo=bar&baz=42'
>>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
'http://user:pw@host:80/?foo=bar%3dbaz'
>>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
'ssh://user:pw@[::1]:2200//home/joe#'
>>> str(url('http://localhost:80//'))
'http://localhost:80//'
>>> str(url('http://localhost:80/'))
'http://localhost:80/'
>>> str(url('http://localhost:80'))
'http://localhost:80/'
>>> str(url('bundle:foo'))
'bundle:foo'
>>> str(url('bundle://../foo'))
'bundle:../foo'
>>> str(url('path'))
'path'
>>> str(url('file:///tmp/foo/bar'))
'file:///tmp/foo/bar'
>>> str(url('file:///c:/tmp/foo/bar'))
'file:///c:/tmp/foo/bar'
>>> print url(r'bundle:foo\bar')
bundle:foo\bar
>>> print url(r'file:///D:\data\hg')
file:///D:\data\hg
"""
if self._localpath:
s = self.path
if self.scheme == 'bundle':
s = 'bundle:' + s
if self.fragment:
s += '#' + self.fragment
return s
s = self.scheme + ':'
if self.user or self.passwd or self.host:
s += '//'
elif self.scheme and (not self.path or self.path.startswith('/')
or hasdriveletter(self.path)):
Brodie Rao
url: move URL parsing functions into util to improve startup time...
r14076 s += '//'
Patrick Mezard
util: fix url.__str__() for windows file URLs...
r15609 if hasdriveletter(self.path):
s += '/'
Brodie Rao
url: move URL parsing functions into util to improve startup time...
r14076 if self.user:
s += urllib.quote(self.user, safe=self._safechars)
if self.passwd:
s += ':' + urllib.quote(self.passwd, safe=self._safechars)
if self.user or self.passwd:
s += '@'
if self.host:
if not (self.host.startswith('[') and self.host.endswith(']')):
s += urllib.quote(self.host)
else:
s += self.host
if self.port:
s += ':' + urllib.quote(self.port)
if self.host:
s += '/'
if self.path:
Benoit Boissinot
url: store and assume the query part of an url is in escaped form (issue2921)
r14988 # TODO: similar to the query string, we should not unescape the
# path when we store it, the path might contain '%2f' = '/',
# which we should *not* escape.
Brodie Rao
url: move URL parsing functions into util to improve startup time...
r14076 s += urllib.quote(self.path, safe=self._safepchars)
if self.query:
Benoit Boissinot
url: store and assume the query part of an url is in escaped form (issue2921)
r14988 # we store the query in escaped form.
s += '?' + self.query
Brodie Rao
url: move URL parsing functions into util to improve startup time...
r14076 if self.fragment is not None:
s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
return s
    def authinfo(self):
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False
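    # Example (illustrative):
    #   url('ssh://example.com/repo').isabs() -> True   (remote URL)
    #   url('/srv/repo').isabs()              -> True   (POSIX-style absolute)
    #   url('relative/path').isabs()          -> False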

    def localpath(self):
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')

def hasscheme(path):
    return bool(url(path).scheme)

def hasdriveletter(path):
    return path and path[1:2] == ':' and path[0:1].isalpha()

def urllocalpath(path):
    return url(path, parsequery=False, parsefragment=False).localpath()

def hidepassword(u):
    '''hide user credential in a url string'''
    u = url(u)
    if u.passwd:
        u.passwd = '***'
    return str(u)

def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    return str(u)
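# Example (illustrative; 'joe' and 'secret' are made-up credentials):
#   hidepassword('http://joe:secret@example.com/repo')
#   -> 'http://joe:***@example.com/repo'
#   removeauth('http://joe:secret@example.com/repo')
#   -> 'http://example.com/repo'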

def isatty(fp):
    try:
        return fp.isatty()
    except AttributeError:
        return False

timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
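# Example (illustrative; assumes unitcountfn, defined earlier in this module,
# picks the first row whose threshold the value reaches):
#   timecount(42)     -> '42.00 s'
#   timecount(0.0042) -> '4.200 ms'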

_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        start = time.time()
        indent = 2
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            elapsed = time.time() - start
            _timenesting[0] -= indent
            sys.stderr.write('%s%s: %s\n' %
                             (' ' * _timenesting[0], func.__name__,
                              timecount(elapsed)))
    return wrapper

_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    t = s.strip().lower()
    try:
        for k, u in _sizeunits:
            if t.endswith(k):
                return int(float(t[:-len(k)]) * u)
        return int(t)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)

class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda x: x[0])
        results = []
        for source, hook in self._hooks:
            results.append(hook(*args))
        return results
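# Example (illustrative): hooks run sorted by source name, so 'ext-a' fires
# before 'ext-b' regardless of registration order:
#   h = hooks()
#   h.add('ext-b', lambda x: x + 1)
#   h.add('ext-a', lambda x: x * 2)
#   h(10) -> [20, 11]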

def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not to be used in production code but very convenient while developing.
    '''
    entries = [(fileline % (fn, ln), func)
        for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
    if entries:
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)

def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' last entries. By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not to be used in production code but very convenient while developing.
    '''
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg)
    for line in getstackframes(skip + 1):
        f.write(line)
    f.flush()
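# Example (illustrative): drop a call like
#   debugstacktrace('who calls me?', skip=1)
# into a suspect function while debugging to print the current call chain
# to stderr without raising an exception.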

class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        dirs = self._dirs
        for base in finddirs(path):
            if base in dirs:
                dirs[base] += 1
                return
            dirs[base] = 1

    def delpath(self, path):
        dirs = self._dirs
        for base in finddirs(path):
            if dirs[base] > 1:
                dirs[base] -= 1
                return
            del dirs[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs

if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs

def finddirs(path):
    pos = path.rfind('/')
    while pos != -1:
        yield path[:pos]
        pos = path.rfind('/', 0, pos)
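# Example (illustrative):
#   list(finddirs('a/b/c')) -> ['a/b', 'a']
#   d = dirs(['a/b/c', 'a/d'])
#   'a' in d     -> True
#   'a/b' in d   -> True
#   'a/b/c' in d -> False   (only ancestor directories are counted)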

# compression utility

class nocompress(object):
    def compress(self, x):
        return x
    def flush(self):
        return ""

compressors = {
    None: nocompress,
    # lambda to prevent early import
    'BZ': lambda: bz2.BZ2Compressor(),
    'GZ': lambda: zlib.compressobj(),
    }
# also support the old form as a courtesy
compressors['UN'] = compressors[None]
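# Example (illustrative; 'chunk' stands for any byte string):
#   z = compressors['GZ']()
#   data = z.compress(chunk) + z.flush()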

def _makedecompressor(decompcls):
    def generator(f):
        d = decompcls()
        for chunk in filechunkiter(f):
            yield d.decompress(chunk)
    def func(fh):
        return chunkbuffer(generator(fh))
    return func

class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked when enter() is called.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                pending = sys.exc_info()
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
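# Example (illustrative; 'makelock1', 'makelock2' and 'cleanup' stand for any
# no-argument callables, the first two returning context managers):
#   with ctxmanager(makelock1, makelock2) as c:
#       lock1, lock2 = c.enter()
#       c.atexit(cleanup)    # runs before the locks' __exit__ functions
#       ...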

def _bz2():
    d = bz2.BZ2Decompressor()
    # Bzip2 streams start with BZ, but we stripped it.
    # We put it back for good measure.
    d.decompress('BZ')
    return d

decompressors = {None: lambda fh: fh,
                 '_truncatedBZ': _makedecompressor(_bz2),
                 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
                 }
# also support the old form as a courtesy
decompressors['UN'] = decompressors[None]
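# Example (illustrative; 'fileobj' stands for any file-like object holding
# zlib-compressed data): the wrapper reads and decompresses lazily through
# chunkbuffer, defined earlier in this module:
#   fh = decompressors['GZ'](fileobj)
#   data = fh.read(4096)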

# convenient shortcut
dst = debugstacktrace