##// END OF EJS Templates
bugzilla: allow change comment to mark bugs fixed...
bugzilla: allow change comment to mark bugs fixed Add a second regular expression used when scanning change comments. Bugs matched by this new regular expression have the bug comments and optionally hours updated as with the first regular expression, but they are also marked as fixed. The bug status and resolution used to mark a bug as fixed can be configured. By default status is set to RESOLVED and resolution to FIXED, the default Bugzilla settings. For example, a change comment containing 'Fixes 1234 h1.5' will be added to bug 1234, the bug will have its working time increased by 1.5 hours, and the bug will be marked RESOLVED/FIXED. Change comments may contain both bug update and fix instructions. If the same bug ID occurs in both, the last instruction found takes precedence. The patch adds new bug states 'bug_status' and 'resolution' and actions to update them to the XMLRPC and XMLRPC/email access methods. XMLRPC does not support marking bugs as fixed when used with Bugzilla versions prior to 4.0. When used with an earlier Bugzilla version, a warning is issued and only the comment and hours are updated.

File last commit:

r16166:5b0a4383 merge default
r16223:ac4fd323 default
Show More
lfutil.py
459 lines | 15.4 KiB | text/x-python | PythonLexer
various
hgext: add largefiles extension...
r15168 # Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''largefiles utility code: must not import other modules in this package.'''
import os
import errno
Benjamin Pollack
largefiles: use XDG and OS X-specific cache locations by default (issue3067)
r15320 import platform
various
hgext: add largefiles extension...
r15168 import shutil
import stat
Na'Tosha Bard
largefiles: cleanup import, now that we can assume > 1.9 for bundled extension
r15226 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
various
hgext: add largefiles extension...
r15168 from mercurial.i18n import _
shortname = '.hglf'
longname = 'largefiles'
# -- Portability wrappers ----------------------------------------------
Na'Tosha Bard
largefiles: remove pre-1.9 code from extension first bundled with 1.9
def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
    '''Walk the given dirstate with matcher; no explicit file list is
    passed, so the whole tracked tree is considered.'''
    nofiles = []
    return dirstate.walk(matcher, nofiles, unknown, ignored)
various
hgext: add largefiles extension...
r15168
def repo_add(repo, list):
    '''Schedule the given files for addition via the working context.'''
    return repo[None].add(list)
def repo_remove(repo, list, unlink=False):
Na'Tosha Bard
largefiles: remove pre-1.9 code from extension first bundled with 1.9
r15224 def remove(list, unlink):
wlock = repo.wlock()
various
hgext: add largefiles extension...
r15168 try:
Na'Tosha Bard
largefiles: remove pre-1.9 code from extension first bundled with 1.9
r15224 if unlink:
for f in list:
try:
util.unlinkpath(repo.wjoin(f))
except OSError, inst:
if inst.errno != errno.ENOENT:
raise
repo[None].forget(list)
finally:
wlock.release()
various
hgext: add largefiles extension...
r15168 return remove(list, unlink=unlink)
def repo_forget(repo, list):
    '''Forget the given files via the working context (stop tracking
    without deleting).'''
    return repo[None].forget(list)
def findoutgoing(repo, remote, force):
    '''Return the changesets present locally but missing from remote.'''
    from mercurial import discovery
    result = discovery.findcommonincoming(repo, remote, force=force)
    common = result[0]
    return repo.changelog.findmissing(common)
various
hgext: add largefiles extension...
r15168
# -- Private worker functions ------------------------------------------
Greg Ward
largefiles: factor out lfutil.getminsize()
def getminsize(ui, assumelfiles, opt, default=10):
    '''Return the minimum size (in MB, as a float) above which a file is
    treated as a largefile. opt (command-line value) wins; otherwise the
    [largefiles] minsize config is consulted when assumelfiles is set.
    Raises util.Abort for a non-numeric or missing size.'''
    lfsize = opt
    if not lfsize and assumelfiles:
        lfsize = ui.config(longname, 'minsize', default=default)
    if lfsize:
        try:
            lfsize = float(lfsize)
        except ValueError:
            # no trailing newline: Abort messages are newline-terminated
            # when printed (the second Abort below already follows this)
            raise util.Abort(_('largefiles: size must be number (not %s)')
                             % lfsize)
    if lfsize is None:
        raise util.Abort(_('minimum size for largefiles must be specified'))
    return lfsize
various
hgext: add largefiles extension...
def link(src, dest):
    '''Hardlink src to dest; when hardlinking fails, fall back to an
    atomic copy that preserves src's permission bits.'''
    try:
        util.oslink(src, dest)
    except OSError:
        # hardlinks unsupported (or crossing filesystems): copy atomically
        out = util.atomictempfile(dest)
        # read in binary mode; this matters on Windows (issue3164)
        for chunk in util.filechunkiter(open(src, 'rb')):
            out.write(chunk)
        out.close()
        os.chmod(dest, os.stat(src).st_mode)
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
r15316 def usercachepath(ui, hash):
Greg Ward
largefiles: use ui.configpath() where appropriate
r15350 path = ui.configpath(longname, 'usercache', None)
various
hgext: add largefiles extension...
r15168 if path:
path = os.path.join(path, hash)
else:
if os.name == 'nt':
Greg Ward
largefiles: cosmetics, whitespace, code style...
r15255 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
Kevin Gessner
largefiles: don't require a user cache (issue3088) (issue3155)...
r15658 if appdata:
path = os.path.join(appdata, longname, hash)
Benjamin Pollack
largefiles: use XDG and OS X-specific cache locations by default (issue3067)
r15320 elif platform.system() == 'Darwin':
Kevin Gessner
largefiles: don't require a user cache (issue3088) (issue3155)...
r15658 home = os.getenv('HOME')
if home:
path = os.path.join(home, 'Library', 'Caches',
longname, hash)
various
hgext: add largefiles extension...
r15168 elif os.name == 'posix':
Benjamin Pollack
largefiles: use XDG and OS X-specific cache locations by default (issue3067)
r15320 path = os.getenv('XDG_CACHE_HOME')
if path:
path = os.path.join(path, longname, hash)
else:
Kevin Gessner
largefiles: don't require a user cache (issue3088) (issue3155)...
r15658 home = os.getenv('HOME')
if home:
path = os.path.join(home, '.cache', longname, hash)
various
hgext: add largefiles extension...
r15168 else:
Greg Ward
largefiles: improve error reporting...
r15253 raise util.Abort(_('unknown operating system: %s\n') % os.name)
various
hgext: add largefiles extension...
r15168 return path
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def inusercache(ui, hash):
    '''True-ish when the largefile with this hash exists in the user
    cache (falsy when no cache location is available).'''
    cached = usercachepath(ui, hash)
    return cached and os.path.exists(cached)
various
hgext: add largefiles extension...
r15168
def findfile(repo, hash):
    '''Return the path of the largefile with the given hash, checking
    the local store first and the user cache second (a cached copy is
    linked into the store); None when found in neither place.'''
    if instore(repo, hash):
        repo.ui.note(_('Found %s in store\n') % hash)
        return storepath(repo, hash)
    if inusercache(repo.ui, hash):
        repo.ui.note(_('Found %s in system cache\n') % hash)
        storedest = storepath(repo, hash)
        # the store directory may not exist yet (fresh clone)
        util.makedirs(os.path.dirname(storedest))
        link(usercachepath(repo.ui, hash), storedest)
        return storedest
    return None
various
hgext: add largefiles extension...
r15168
class largefiles_dirstate(dirstate.dirstate):
    '''dirstate subclass that normalizes every path it receives to the
    slash-separated form used by the lfdirstate (see unixpath()).'''
    def __getitem__(self, key):
        return super(largefiles_dirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefiles_dirstate, self).normal(unixpath(f))
    def normallookup(self, f):
        return super(largefiles_dirstate, self).normallookup(unixpath(f))
    def add(self, f):
        return super(largefiles_dirstate, self).add(unixpath(f))
    def remove(self, f):
        return super(largefiles_dirstate, self).remove(unixpath(f))
    def drop(self, f):
        return super(largefiles_dirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefiles_dirstate, self).forget(unixpath(f))
various
hgext: add largefiles extension...
r15168
def openlfdirstate(ui, repo):
'''
Greg Ward
largefiles: improve comments, internal docstrings...
r15252 Return a dirstate object that tracks largefiles: i.e. its root is
the repo root, but it is saved in .hg/largefiles/dirstate.
various
hgext: add largefiles extension...
r15168 '''
admin = repo.join(longname)
Na'Tosha Bard
largefiles: remove pre-1.9 code from extension first bundled with 1.9
r15224 opener = scmutil.opener(admin)
Greg Ward
largefiles: drop more unnecessary compatibility checks
r15349 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
repo.dirstate._validate)
various
hgext: add largefiles extension...
r15168
Greg Ward
largefiles: improve comments, internal docstrings...
r15252 # If the largefiles dirstate does not exist, populate and create
# it. This ensures that we create it on the first meaningful
Levi Bard
largefiles: fix inappropriate locking (issue3182)...
r15794 # largefiles operation in a new clone.
various
hgext: add largefiles extension...
r15168 if not os.path.exists(os.path.join(admin, 'dirstate')):
util.makedirs(admin)
matcher = getstandinmatcher(repo)
for standin in dirstate_walk(repo.dirstate, matcher):
lfile = splitstandin(standin)
hash = readstandin(repo, lfile)
lfdirstate.normallookup(lfile)
try:
Mads Kiilerich
largefiles: file storage should be relative to repo, not relative to cwd...
r15553 if hash == hashfile(repo.wjoin(lfile)):
various
hgext: add largefiles extension...
r15168 lfdirstate.normal(lfile)
Martin Geisler
largefiles: fix 'hg clone . ../foo' OSError abort...
r15548 except OSError, err:
various
hgext: add largefiles extension...
r15168 if err.errno != errno.ENOENT:
raise
return lfdirstate
def lfdirstate_status(lfdirstate, repo, rev):
    '''Run lfdirstate.status() and re-check each 'unsure' largefile
    against its standin in rev, classifying it as modified or clean
    (clean files are also marked normal in the lfdirstate).'''
    matchall = match_.always(repo.root, repo.getcwd())
    (unsure, modified, added, removed, missing, unknown, ignored,
     clean) = lfdirstate.status(matchall, [], False, False, False)
    for lfile in unsure:
        expected = repo[rev][standin(lfile)].data().strip()
        if expected != hashfile(repo.wjoin(lfile)):
            modified.append(lfile)
        else:
            clean.append(lfile)
            lfdirstate.normal(lfile)
    return (modified, added, removed, missing, unknown, ignored, clean)
def listlfiles(repo, rev=None, matcher=None):
    '''Return the largefiles of the given changeset, or of the working
    copy when rev is None (unknown files are skipped in that case).'''
    if matcher is None:
        matcher = getstandinmatcher(repo)
    lfiles = []
    for f in repo[rev].walk(matcher):
        # in the working directory, ignore '?' (unknown) standins
        if rev is not None or repo.dirstate[f] != '?':
            lfiles.append(splitstandin(f))
    return lfiles
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def instore(repo, hash):
    '''True if the largefile with this hash exists in the local store.'''
    stored = storepath(repo, hash)
    return os.path.exists(stored)
various
hgext: add largefiles extension...
r15168
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def storepath(repo, hash):
    '''Return the path of hash inside the repo-local largefiles store.'''
    relative = os.path.join(longname, hash)
    return repo.join(relative)
def copyfromcache(repo, hash, filename):
    '''Copy the largefile with the given hash from the store or user
    cache to filename in the working directory. Return True on success,
    False when it is in neither cache (which should not happen: callers
    must first ensure the needed largefile is cached).'''
    cached = findfile(repo, hash)
    if cached is None:
        return False
    util.makedirs(os.path.dirname(repo.wjoin(filename)))
    # the write may fail partway through: working-directory writes are
    # deliberately not atomic
    shutil.copy(cached, repo.wjoin(filename))
    return True
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def copytostore(repo, rev, file, uploaded=False):
    '''Copy the largefile behind file (per its standin in the working
    directory) into the local store, unless it is already present.'''
    hash = readstandin(repo, file)
    if not instore(repo, hash):
        copytostoreabsolute(repo, repo.wjoin(file), hash)
various
hgext: add largefiles extension...
r15168
Dan Villiom Podlaski Christiansen
largefiles: factor out a copyalltostore() function
def copyalltostore(repo, node):
    '''Copy all largefiles in a given revision to the store'''
    ctx = repo[node]
    for fname in ctx.files():
        # only standins that actually exist in this revision's manifest
        if isstandin(fname) and fname in ctx.manifest():
            copytostore(repo, ctx.node(), splitstandin(fname))
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def copytostoreabsolute(repo, file, hash):
    '''Put the file at the given absolute path into the local store under
    hash — hardlinking from the user cache when possible, copying
    atomically otherwise — then link the stored copy into the user cache.'''
    util.makedirs(os.path.dirname(storepath(repo, hash)))
    if inusercache(repo.ui, hash):
        link(usercachepath(repo.ui, hash), storepath(repo, hash))
    else:
        # honor the store's file-creation mode; write atomically and read
        # in binary mode (issue3164)
        out = util.atomictempfile(storepath(repo, hash),
                                  createmode=repo.store.createmode)
        for chunk in util.filechunkiter(open(file, 'rb')):
            out.write(chunk)
        out.close()
    linktousercache(repo, hash)
various
hgext: add largefiles extension...
r15168
Benjamin Pollack
largefiles: rename functions and methods to match desired behavior...
def linktousercache(repo, hash):
    '''Link the stored largefile into the user cache, when a cache
    location exists for this platform (it may not — issue3088).'''
    cachedest = usercachepath(repo.ui, hash)
    if cachedest:
        util.makedirs(os.path.dirname(cachedest))
        link(storepath(repo, hash), cachedest)
various
hgext: add largefiles extension...
r15168
def getstandinmatcher(repo, pats=None, opts=None):
    '''Return a match object that applies pats to the standin directory.

    pats and opts default to "no patterns" / "no options"; None defaults
    replace the old mutable defaults (pats=[], opts={}), which are a
    shared-state hazard. Behavior is otherwise unchanged.'''
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    standindir = repo.pathto(shortname)
    if pats:
        # patterns supplied: search standin directory relative to current dir
        cwd = repo.getcwd()
        if os.path.isabs(cwd):
            # cwd is an absolute path for hg -R <reponame>
            # work relative to the repository root in this case
            cwd = ''
        pats = [os.path.join(standindir, cwd, pat) for pat in pats]
    elif os.path.isdir(standindir):
        # no patterns: relative to repo root
        pats = [standindir]
    else:
        # no patterns and no standin dir: return matcher that matches nothing
        match = match_.match(repo.root, None, [], exact=True)
        match.matchfn = lambda f: False
        return match
    return getmatcher(repo, pats, opts, showbad=False)
def getmatcher(repo, pats=None, opts=None, showbad=True):
    '''Wrapper around scmutil.match() that adds showbad: if false,
    neuter the match object's bad() method so it does not print any
    warnings about missing files or directories.

    None defaults replace the old mutable defaults (pats=[], opts={}),
    avoiding shared state between calls; behavior is unchanged.'''
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    match = scmutil.match(repo[None], pats, opts)
    if not showbad:
        match.bad = lambda f, msg: None
    return match
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher accepting exactly the standins of the files that
    rmatcher accepts; rmatcher's file list seeds the standin matcher.'''
    smatcher = getstandinmatcher(repo, rmatcher.files())
    standinmatchfn = smatcher.matchfn
    def bothmatch(f):
        return standinmatchfn(f) and rmatcher.matchfn(splitstandin(f))
    smatcher.matchfn = bothmatch
    return smatcher
def standin(filename):
    '''Return the repo-relative path of the standin for largefile
    filename.

    Notes: callers wanting an absolute path must repo.wjoin() the
    result themselves; the dirstate needs repo-relative paths. The
    separator is '/' because that is what the dirstate always uses,
    even on Windows, and filename may come from an external source
    (e.g. the command line), hence the pconvert().'''
    return shortname + '/' + util.pconvert(filename)
various
hgext: add largefiles extension...
r15168
def isstandin(filename):
    '''True if filename (in Mercurial's slash-separated internal form)
    lives under the standin directory.'''
    prefix = shortname + '/'
    return filename.startswith(prefix)
def splitstandin(filename):
    '''Return the largefile name for the given standin path, or None if
    filename is not a standin. Separators are normalized to '/' first
    (dirstate convention) in case the name came from an external source
    such as the command line.'''
    parts = util.pconvert(filename).split('/', 1)
    if len(parts) == 2 and parts[0] == shortname:
        return parts[1]
    return None
def updatestandin(repo, standin):
    '''Rewrite the given standin from the current contents (hash and
    executable bit) of its largefile in the working directory, if that
    file exists.'''
    lfpath = repo.wjoin(splitstandin(standin))
    if os.path.exists(lfpath):
        writestandin(repo, standin, hashfile(lfpath), getexecutable(lfpath))
def readstandin(repo, filename, node=None):
    '''Return the hex hash recorded in the standin for filename at the
    given node (working directory when node is None).'''
    ctx = repo[node]
    return ctx[standin(filename)].data().strip()
def writestandin(repo, standin, hash, executable):
    '''Write hash into the standin file at <repo.root>/<standin>,
    applying the permission mode implied by executable.'''
    target = repo.wjoin(standin)
    writehash(hash, target, executable)
def copyandhash(instream, outfile):
    '''Copy each chunk of instream (an iterable of byte strings) to
    outfile while feeding a SHA-1; close outfile and return the binary
    digest. (Closing a file somebody else opened is impolite, but every
    caller opens outfile purely for this copy-and-hash.)'''
    hasher = util.sha1('')
    for chunk in instream:
        hasher.update(chunk)
        outfile.write(chunk)
    outfile.close()
    return hasher.digest()
def hashrepofile(repo, file):
    '''Hash the repo-relative file from the working directory.'''
    absolute = repo.wjoin(file)
    return hashfile(absolute)
def hashfile(file):
    '''Return the hex SHA-1 of file's contents, or '' when the file does
    not exist.'''
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    try:
        # ensure fd is closed even if hashing raises; blockstream() also
        # closes it on normal exhaustion (double close is a no-op)
        for data in blockstream(fd):
            hasher.update(data)
    finally:
        fd.close()
    return hasher.hexdigest()
class limitreader(object):
    '''File-like wrapper that allows at most limit bytes to be read
    from f; further reads return the empty string. close() is a no-op
    because f belongs to the caller.'''
    def __init__(self, f, limit):
        self.f = f
        self.limit = limit
    def read(self, length):
        if self.limit == 0:
            return ''
        # never hand out more than the remaining budget
        length = min(length, self.limit)
        self.limit -= length
        return self.f.read(length)
    def close(self):
        pass
def blockstream(infile, blocksize=128 * 1024):
    """Yield blocksize-sized chunks from infile, closing it once it is
    exhausted (same rudeness as copyandhash() above: the caller opened
    infile just for this)."""
    data = infile.read(blocksize)
    while data:
        yield data
        data = infile.read(blocksize)
    infile.close()
def writehash(hash, filename, executable):
    '''Write hash plus a trailing newline to filename, creating parent
    directories as needed and chmodding per the executable flag.'''
    util.makedirs(os.path.dirname(filename))
    util.writefile(filename, hash + '\n')
    os.chmod(filename, getmode(executable))
various
hgext: add largefiles extension...
r15168
def getexecutable(filename):
    '''True-ish when filename is executable by user, group AND other.'''
    st_mode = os.stat(filename).st_mode
    return (st_mode & stat.S_IXUSR and
            st_mode & stat.S_IXGRP and
            st_mode & stat.S_IXOTH)
various
hgext: add largefiles extension...
r15168
def getmode(executable):
    '''Return the permission mode for a largefile: 0755 when it should
    be executable, 0644 otherwise. (Python 2 octal literals; this file
    targets Python 2.4+, so 0o755 syntax is unavailable.)'''
    if executable:
        return 0755
    else:
        return 0644
def urljoin(first, second, *arg):
    '''Join two or more URL fragments, guaranteeing exactly one '/'
    between adjacent fragments.'''
    def glue(left, right):
        if not left.endswith('/'):
            left = left + '/'
        if right.startswith('/'):
            right = right[1:]
        return left + right
    url = first
    for piece in [second] + list(arg):
        url = glue(url, piece)
    return url
def hexsha1(data):
    '''Return the hex-encoded SHA-1 digest of the file-like object data,
    read chunk by chunk.'''
    hasher = util.sha1()
    for piece in util.filechunkiter(data):
        hasher.update(piece)
    return hasher.hexdigest()
def httpsendfile(ui, filename):
    '''Return a file-like object suitable for uploading filename (opened
    in binary mode) over HTTP.'''
    return httpconnection.httpsendfile(ui, filename, 'rb')
various
hgext: add largefiles extension...
r15168
def unixpath(path):
    '''Return path normalized for the lfdirstate: os.path.normpath()
    first, then forward slashes.'''
    normalized = os.path.normpath(path)
    return util.pconvert(normalized)
various
hgext: add largefiles extension...
r15168
def islfilesrepo(repo):
    '''True when repo has the largefiles requirement and actually stores
    at least one standin.'''
    if 'largefiles' not in repo.requirements:
        return False
    prefix = shortname + '/'
    return util.any(prefix in f[0] for f in repo.store.datafiles())
various
hgext: add largefiles extension...
r15168
Matt Mackall
largefiles: py2.4 doesn't have BaseException...
class storeprotonotcapable(Exception):
    '''Raised when no store class supports any of the given store types.
    (Subclasses Exception, not BaseException: Python 2.4 compatibility.)'''
    def __init__(self, storetypes):
        # the store types that were requested but unsupported
        self.storetypes = storetypes
Na'Tosha Bard
largefiles: only cache largefiles in new heads...
r16103
def getcurrentheads(repo):
    '''Return the head changesets of every branch in repo, as one flat
    list.'''
    heads = []
    for branch in repo.branchmap():
        heads.extend(repo.branchheads(branch))
    return heads
Na'Tosha Bard
largefiles: optimize update speed by only updating changed largefiles...
r16120
def getstandinsstate(repo):
    '''Return a list of (largefile, hash-recorded-in-standin) pairs for
    every standin in the working directory.'''
    matcher = getstandinmatcher(repo)
    state = []
    for standinfile in dirstate_walk(repo.dirstate, matcher):
        lfile = splitstandin(standinfile)
        state.append((lfile, readstandin(repo, lfile)))
    return state