# Copyright 2010-2011 Fog Creek Software
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''largefile store working over Mercurial's wire protocol'''

from __future__ import absolute_import

from . import (
    lfutil,
    remotestore,
)


class wirestore(remotestore.remotestore):
    def __init__(self, ui, repo, remote):
        # The 'largefiles' capability value is a comma-separated list of
        # store types; the remote must at least be able to serve largefiles.
        cap = remote.capable(b'largefiles')
        if not cap:
            raise lfutil.storeprotonotcapable([])
        storetypes = cap.split(b',')
        if b'serve' not in storetypes:
            raise lfutil.storeprotonotcapable(storetypes)
        self.remote = remote
        super(wirestore, self).__init__(ui, repo, remote.url())

    def _put(self, hash, fd):
        return self.remote.putlfile(hash, fd)

    def _get(self, hash):
        return self.remote.getlfile(hash)
    def _stat(self, hashes):
        '''For each hash, return 0 if it is available, other values if not.
        It is usually 2 if the largefile is missing, but might be 1 if the
        server has a corrupted copy.'''
        # Issue all statlfile calls through a single command executor so a
        # batch-capable peer can pipeline them.
        with self.remote.commandexecutor() as e:
            fs = []
            for hash in hashes:
                fs.append(
                    (hash, e.callcommand(b'statlfile', {b'sha': hash}))
                )

            return {hash: f.result() for hash, f in fs}
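

# --- Illustrative sketch (not part of the upstream module) -----------------
# The helper below shows one way a caller might interpret the status codes
# documented in _stat() above. The name `_statcodetext`, the mapping table,
# and the wording of the messages are assumptions added for illustration.
_STAT_CODE_TEXT = {
    0: b'available on the remote',
    1: b'corrupted on the remote',
    2: b'missing from the remote',
}


def _statcodetext(code):
    '''Return a short description for a statlfile result code.'''
    return _STAT_CODE_TEXT.get(code, b'unknown status: %d' % code)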