branchmap-v3: filter topo heads using node for performance reasons

The branchmap currently contains heads as nodeids. If we build a set of
revnums with the topological heads, we need to turn the nodeids in the
branchmap into revnums to be able to check whether they are topo-heads. That
nodeid → revnum lookup is "expensive" and adds up to something noticeable if
you do it hundreds of thousands of times.

Instead we turn all the topo-head revnums into nodes and build a set, so we
can directly test membership of the nodeids stored in the branchmap. That is
much faster. Ideally we would have revnums in the branchmap and could test
them directly against a revnum set, which would be faster still. However,
that is an adventure for another time.

Without this change, the branchmap format "v3" was significantly slower than
the "v2" format. With this change, some of that gap is recovered.

With rust + persistent nodemap, this overhead was smaller because the extra
lookup did not have to build the nodemap from scratch. In addition, the
mozilla-unified repository is able to use the "pure_top" mode of branchmap
v3, so it was not really affected by this. Future changesets will work on the
remainder of the performance gap.

### benchmark.name = hg.command.unbundle
# bin-env-vars.hg.py-re2-module = default
# benchmark.variants.issue6528 = disabled
# benchmark.variants.resource-usage = default
# benchmark.variants.reuse-external-delta-parent = yes
# benchmark.variants.revs = any-1-extra-rev
# benchmark.variants.source = unbundle
# benchmark.variants.validate = default
# benchmark.variants.verbosity = quiet

## data-env-vars.name = netbeans-2018-08-01-zstd-sparse-revlog
# bin-env-vars.hg.flavor = default
branch-v2:        0.233711 ~~~~~
branch-v3 before: 0.380994 (+63.02%, +0.15)
branch-v3 after:  0.368769 (+57.79%, +0.14)
# bin-env-vars.hg.flavor = rust
branch-v2:        0.235230 ~~~~~
branch-v3 before: 0.385060 (+63.70%, +0.15)
branch-v3 after:  0.372460 (+58.34%, +0.14)

## data-env-vars.name = netbeans-2018-08-01-ds2-pnm
# bin-env-vars.hg.flavor = rust
branch-v2:        0.255586 ~~~~~
branch-v3 before: 0.317524 (+24.23%, +0.06)
branch-v3 after:  0.318907 (+24.78%, +0.06)

## data-env-vars.name = mozilla-central-2024-03-22-zstd-sparse-revlog
# bin-env-vars.hg.flavor = default
branch-v2:        0.339010 ~~~~~
branch-v3 before: 0.410007 (+20.94%, +0.07)
branch-v3 after:  0.349752 (+3.17%, +0.01)
# bin-env-vars.hg.flavor = rust
branch-v2:        0.346525 ~~~~~
branch-v3 before: 0.410428 (+18.44%, +0.06)
branch-v3 after:  0.354300 (+2.24%, +0.01)

## data-env-vars.name = mozilla-central-2024-03-22-ds2-pnm
# bin-env-vars.hg.flavor = rust
branch-v2:        0.380202 ~~~~~
branch-v3 before: 0.393871 (+3.60%, +0.01)
branch-v3 after:  0.396293 (+4.23%, +0.02)

## data-env-vars.name = mozilla-unified-2024-03-22-zstd-sparse-revlog
# bin-env-vars.hg.flavor = default
branch-v2:        0.412165 ~~~~~
branch-v3 before: 0.438105 (+6.29%, +0.03)
branch-v3 after:  0.424769 (+3.06%, +0.01)
# bin-env-vars.hg.flavor = rust
branch-v2:        0.412397 ~~~~~
branch-v3 before: 0.438405 (+6.31%, +0.03)
branch-v3 after:  0.421796 (+2.28%, +0.01)

## data-env-vars.name = mozilla-unified-2024-03-22-ds2-pnm
# bin-env-vars.hg.flavor = rust
branch-v2:        0.429501 ~~~~~
branch-v3 before: 0.452692 (+5.40%, +0.02)
branch-v3 after:  0.443849 (+3.34%, +0.01)

## data-env-vars.name = mozilla-try-2024-03-26-zstd-sparse-revlog
# bin-env-vars.hg.flavor = default
branch-v2:        3.403171 ~~~~~
branch-v3 before: 6.562345 (+92.83%, +3.16)
branch-v3 after:  6.234055 (+83.18%, +2.83)
# bin-env-vars.hg.flavor = rust
branch-v2:        3.454876 ~~~~~
branch-v3 before: 6.160248 (+78.31%, +2.71)
branch-v3 after:  6.307813 (+82.58%, +2.85)

## data-env-vars.name = mozilla-try-2024-03-26-ds2-pnm
# bin-env-vars.hg.flavor = rust
branch-v2:        3.465435 ~~~~~
branch-v3 before: 5.381648 (+55.30%, +1.92)
branch-v3 after:  5.176076 (+49.36%, +1.71)
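
A minimal sketch of the before/after membership test described above
(illustrative only; the names to_rev, to_node, topo_head_revs, and
branchmap_head_nodes are stand-ins, not the actual branchcache API):

    # Before: one nodeid -> revnum lookup per head stored in the branchmap,
    # which gets expensive when the branchmap holds many heads.
    def topo_filter_slow(branchmap_head_nodes, topo_head_revs, to_rev):
        topo = set(topo_head_revs)
        return [n for n in branchmap_head_nodes if to_rev(n) in topo]

    # After: convert the (comparatively few) topological head revnums to
    # nodeids once, then test the branchmap nodeids directly against that set.
    def topo_filter_fast(branchmap_head_nodes, topo_head_revs, to_node):
        topo_nodes = {to_node(r) for r in topo_head_revs}
        return [n for n in branchmap_head_nodes if n in topo_nodes]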

# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import annotations
import gzip
import os
import struct
import tarfile
import time
import typing
import zipfile
import zlib
from typing import (
Optional,
)
from .i18n import _
from .node import nullrev
from .pycompat import open
from . import (
error,
formatter,
match as matchmod,
pycompat,
scmutil,
util,
vfs as vfsmod,
)
from .utils import stringutil
if typing.TYPE_CHECKING:
from . import (
localrepo,
)
stringio = util.stringio
# from unzip source code:
_UNX_IFREG = 0x8000
_UNX_IFLNK = 0xA000
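# (0x8000 and 0xA000 are the Unix S_IFREG and S_IFLNK file-type bits;
# zipit.addfile below ORs them with the permission bits and stores the result
# in the high 16 bits of a zip entry's external_attr)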
def tidyprefix(dest, kind, prefix):
"""choose prefix to use for names in archive. make sure prefix is
safe for consumers."""
if prefix:
prefix = util.normpath(prefix)
else:
if not isinstance(dest, bytes):
raise ValueError(b'dest must be string if no prefix')
prefix = os.path.basename(dest)
lower = prefix.lower()
for sfx in exts.get(kind, []):
if lower.endswith(sfx):
prefix = prefix[: -len(sfx)]
break
lpfx = os.path.normpath(util.localpath(prefix))
prefix = util.pconvert(lpfx)
if not prefix.endswith(b'/'):
prefix += b'/'
# Drop the leading '.' path component if present, so Windows can read the
# zip files (issue4634)
if prefix.startswith(b'./'):
prefix = prefix[2:]
if prefix.startswith(b'../') or os.path.isabs(lpfx) or b'/../' in prefix:
raise error.Abort(_(b'archive prefix contains illegal components'))
return prefix
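# For example (illustrative values): tidyprefix(b'build/proj-1.0.tar.gz',
# b'tgz', b'') strips the b'.tar.gz' suffix from the basename and returns
# b'proj-1.0/', which archive() then prepends to every member name.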
exts = {
b'tar': [b'.tar'],
b'tbz2': [b'.tbz2', b'.tar.bz2'],
b'tgz': [b'.tgz', b'.tar.gz'],
b'zip': [b'.zip'],
b'txz': [b'.txz', b'.tar.xz'],
}
def guesskind(dest):
for kind, extensions in exts.items():
if any(dest.endswith(ext) for ext in extensions):
return kind
return None
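# e.g. guesskind(b'snapshot.tar.xz') -> b'txz', guesskind(b'snapshot.bin') -> None
# (illustrative file names)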
def _rootctx(repo):
# repo[0] may be hidden
for rev in repo:
return repo[rev]
return repo[nullrev]
# {tags} on ctx includes local tags and 'tip', with no current way to limit
# that to global tags. Therefore, use {latesttag} as a substitute when
# the distance is 0, since that will be the list of global tags on ctx.
_defaultmetatemplate = br'''
repo: {root}
node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
branch: {branch|utf8}
{ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
separate("\n",
join(latesttag % "latesttag: {tag}", "\n"),
"latesttagdistance: {latesttagdistance}",
"changessincelatesttag: {changessincelatesttag}"))}
'''[
1:
] # drop leading '\n'
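# Rendered for a clean (non-dirty) revision, the resulting .hg_archival.txt
# looks roughly like this (hashes and tag values are illustrative):
#
#   repo: <40 hex digits of the root changeset>
#   node: <40 hex digits of the archived changeset>
#   branch: default
#   latesttag: 1.9
#   latesttagdistance: 42
#   changessincelatesttag: 57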
def buildmetadata(ctx):
'''build content of .hg_archival.txt'''
repo = ctx.repo()
opts = {
b'template': repo.ui.config(
b'experimental', b'archivemetatemplate', _defaultmetatemplate
)
}
out = util.stringio()
fm = formatter.formatter(repo.ui, out, b'archive', opts)
fm.startitem()
fm.context(ctx=ctx)
fm.data(root=_rootctx(repo).hex())
if ctx.rev() is None:
dirty = b''
if ctx.dirty(missing=True):
dirty = b'+'
fm.data(dirty=dirty)
fm.end()
return out.getvalue()
class tarit:
"""write archive to tar file or stream. can write uncompressed,
or compress with gzip or bzip2."""
def __init__(self, dest, mtime, kind=b''):
self.mtime = mtime
self.fileobj = None
def taropen(mode, name=b'', fileobj=None):
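            # gzip is handled by hand (instead of tarfile's "w:gz" mode) so
            # that the archive mtime, rather than the current time, ends up
            # in the gzip header, keeping repeated archive runs reproducible.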
if kind == b'gz':
mode = mode[0:1]
if not fileobj:
fileobj = open(name, mode + b'b')
gzfileobj = gzip.GzipFile(
name,
pycompat.sysstr(mode + b'b'),
zlib.Z_BEST_COMPRESSION,
fileobj,
mtime=mtime,
)
self.fileobj = gzfileobj
return tarfile.TarFile.taropen(name, "w", gzfileobj)
else:
try:
return tarfile.open(
name, pycompat.sysstr(mode + kind), fileobj
)
except tarfile.CompressionError as e:
raise error.Abort(stringutil.forcebytestr(e))
if isinstance(dest, bytes):
self.z = taropen(b'w:', name=dest)
else:
self.z = taropen(b'w|', fileobj=dest)
def addfile(self, name, mode, islink, data):
name = pycompat.fsdecode(name)
i = tarfile.TarInfo(name)
i.mtime = self.mtime
i.size = len(data)
if islink:
i.type = tarfile.SYMTYPE
i.mode = 0o777
i.linkname = pycompat.fsdecode(data)
data = None
i.size = 0
else:
i.mode = mode
data = stringio(data)
self.z.addfile(i, data)
def done(self):
self.z.close()
if self.fileobj:
self.fileobj.close()
class zipit:
"""write archive to zip file or stream. can write uncompressed,
or compressed with deflate."""
def __init__(self, dest, mtime, compress=True):
if isinstance(dest, bytes):
dest = pycompat.fsdecode(dest)
self.z = zipfile.ZipFile(
dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
)
# Python's zipfile module emits deprecation warnings if we try
# to store files with a date before 1980.
epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
if mtime < epoch:
mtime = epoch
self.mtime = mtime
self.date_time = time.gmtime(mtime)[:6]
def addfile(self, name, mode, islink, data):
i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
i.compress_type = self.z.compression # pytype: disable=attribute-error
# unzip will not honor unix file modes unless file creator is
# set to unix (id 3).
i.create_system = 3
ftype = _UNX_IFREG
if islink:
mode = 0o777
ftype = _UNX_IFLNK
i.external_attr = (mode | ftype) << 16
# add "extended-timestamp" extra block, because zip archives
# without this will be extracted with unexpected timestamp,
# if TZ is not configured as GMT
i.extra += struct.pack(
b'<hhBl',
0x5455, # block type: "extended-timestamp"
1 + 4, # size of this block
1, # "modification time is present"
int(self.mtime),
) # last modification (UTC)
self.z.writestr(i, data)
def done(self):
self.z.close()
class fileit:
'''write archive as files in directory.'''
def __init__(self, name, mtime):
self.basedir = name
self.opener = vfsmod.vfs(self.basedir)
self.mtime = mtime
def addfile(self, name, mode, islink, data):
if islink:
self.opener.symlink(data, name)
return
f = self.opener(name, b"w", atomictemp=False)
f.write(data)
f.close()
destfile = os.path.join(self.basedir, name)
os.chmod(destfile, mode)
if self.mtime is not None:
os.utime(destfile, (self.mtime, self.mtime))
def done(self):
pass
archivers = {
b'files': fileit,
b'tar': tarit,
b'tbz2': lambda name, mtime: tarit(name, mtime, b'bz2'),
b'tgz': lambda name, mtime: tarit(name, mtime, b'gz'),
b'txz': lambda name, mtime: tarit(name, mtime, b'xz'),
b'uzip': lambda name, mtime: zipit(name, mtime, False),
b'zip': zipit,
}
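# Typical use (illustrative values): create a gzip-compressed tarball of a
# revision on disk, or stream a zip to an already-open file object (hgweb
# serves archives this way, passing a callable for `dest` so nothing is
# opened until the first member is written):
#
#   archive(repo, b'../snapshot.tar.gz', ctx.node(), b'tgz', prefix=b'snapshot/')
#   archive(repo, response_fileobj, ctx.node(), b'zip')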
def archive(
repo: "localrepo.localrepository",
dest, # TODO: should be bytes, but could be Callable
node,
kind: bytes,
decode: bool = True,
match=None,
prefix: bytes = b'',
mtime: Optional[float] = None,
subrepos: bool = False,
) -> int:
"""create archive of repo as it was at node.
dest can be name of directory, name of archive file, a callable, or file
    object to write archive to. If it is a callable, it will be called to open
the actual file object before the first archive member is written.
kind is type of archive to create.
decode tells whether to put files through decode filters from
hgrc.
match is a matcher to filter names of files to write to archive.
prefix is name of path to put before every archive member.
mtime is the modified time, in seconds, or None to use the changeset time.
subrepos tells whether to include subrepos.
"""
if kind == b'files':
if prefix:
raise error.Abort(_(b'cannot give prefix when archiving to files'))
else:
prefix = tidyprefix(dest, kind, prefix)
archiver = None
ctx = repo[node]
def opencallback():
"""Return the archiver instance, creating it if necessary.
This function is called when the first actual entry is created.
It may be called multiple times from different layers.
When serving the archive via hgweb, no errors should happen after
this point.
"""
nonlocal archiver
if archiver is None:
if callable(dest):
output = dest()
else:
output = dest
archiver = archivers[kind](output, mtime or ctx.date()[0])
assert archiver is not None
if repo.ui.configbool(b"ui", b"archivemeta"):
metaname = b'.hg_archival.txt'
if match(metaname):
write(metaname, 0o644, False, lambda: buildmetadata(ctx))
return archiver
def write(name, mode, islink, getdata):
if archiver is None:
opencallback()
assert archiver is not None, "archive should be opened by now"
data = getdata()
if decode:
data = repo.wwritedata(name, data)
archiver.addfile(prefix + name, mode, islink, data)
if kind not in archivers:
raise error.Abort(_(b"unknown archive type '%s'") % kind)
if not match:
match = scmutil.matchall(repo)
files = list(ctx.manifest().walk(match))
total = len(files)
if total:
files.sort()
scmutil.prefetchfiles(
repo, [(ctx.rev(), scmutil.matchfiles(repo, files))]
)
progress = repo.ui.makeprogress(
_(b'archiving'), unit=_(b'files'), total=total
)
progress.update(0)
for f in files:
ff = ctx.flags(f)
write(f, b'x' in ff and 0o755 or 0o644, b'l' in ff, ctx[f].data)
progress.increment(item=f)
progress.complete()
if subrepos:
for subpath in sorted(ctx.substate):
sub = ctx.workingsub(subpath)
submatch = matchmod.subdirmatcher(subpath, match)
subprefix = prefix + subpath + b'/'
total += sub.archive(opencallback, subprefix, submatch, decode)
if total == 0:
raise error.Abort(_(b'no files match the archive pattern'))
assert archiver is not None, "archive should have been opened before"
archiver.done()
return total