from __future__ import absolute_import

import hashlib
import struct

from mercurial.node import hex, nullid
from mercurial import (
    pycompat,
    util,
)
from . import (
    basepack,
    constants,
    shallowutil,
)

# (filename hash, offset, size)
INDEXFORMAT2 = b'!20sQQII'
INDEXENTRYLENGTH2 = struct.calcsize(INDEXFORMAT2)

NODELENGTH = 20

NODEINDEXFORMAT = b'!20sQ'
NODEINDEXENTRYLENGTH = struct.calcsize(NODEINDEXFORMAT)

# (node, p1, p2, linknode)
PACKFORMAT = b"!20s20s20s20sH"
PACKENTRYLENGTH = 82

ENTRYCOUNTSIZE = 4

INDEXSUFFIX = b'.histidx'
PACKSUFFIX = b'.histpack'

ANC_NODE = 0
ANC_P1NODE = 1
ANC_P2NODE = 2
ANC_LINKNODE = 3
ANC_COPYFROM = 4
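# PACKENTRYLENGTH is the fixed size of one revision record in the .histpack
# data: struct.calcsize(PACKFORMAT) == 4 * 20 + 2 == 82 bytes (four 20-byte
# nodes plus a 2-byte copyfrom length). The ANC_* constants are the positions
# of those fields in the tuple returned by struct.unpack(PACKFORMAT, ...);
# ANC_COPYFROM indexes the copyfrom length, and the variable-length copyfrom
# path, when present, immediately follows the fixed-size record on disk.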


class historypackstore(basepack.basepackstore):
    INDEXSUFFIX = INDEXSUFFIX
    PACKSUFFIX = PACKSUFFIX

    def getpack(self, path):
        return historypack(path)

    def getancestors(self, name, node, known=None):
        for pack in self.packs:
            try:
                return pack.getancestors(name, node, known=known)
            except KeyError:
                pass

        for pack in self.refresh():
            try:
                return pack.getancestors(name, node, known=known)
            except KeyError:
                pass

        raise KeyError((name, node))

    def getnodeinfo(self, name, node):
        for pack in self.packs:
            try:
                return pack.getnodeinfo(name, node)
            except KeyError:
                pass

        for pack in self.refresh():
            try:
                return pack.getnodeinfo(name, node)
            except KeyError:
                pass

        raise KeyError((name, node))

    def add(self, filename, node, p1, p2, linknode, copyfrom):
        raise RuntimeError(
            b"cannot add to historypackstore (%s:%s)" % (filename, hex(node))
        )


class historypack(basepack.basepack):
    INDEXSUFFIX = INDEXSUFFIX
    PACKSUFFIX = PACKSUFFIX

    SUPPORTED_VERSIONS = [2]

    def __init__(self, path):
        super(historypack, self).__init__(path)
        self.INDEXFORMAT = INDEXFORMAT2
        self.INDEXENTRYLENGTH = INDEXENTRYLENGTH2

    def getmissing(self, keys):
        missing = []
        for name, node in keys:
            try:
                self._findnode(name, node)
            except KeyError:
                missing.append((name, node))

        return missing

    def getancestors(self, name, node, known=None):
        """Returns as many ancestors as we're aware of.

        return value: {
            node: (p1, p2, linknode, copyfrom),
            ...
        }
        """
        if known and node in known:
            return []

        ancestors = self._getancestors(name, node, known=known)
        results = {}
        for ancnode, p1, p2, linknode, copyfrom in ancestors:
            results[ancnode] = (p1, p2, linknode, copyfrom)

        if not results:
            raise KeyError((name, node))
        return results

    def getnodeinfo(self, name, node):
        # Drop the node from the tuple before returning, since the result should
        # just be (p1, p2, linknode, copyfrom)
        return self._findnode(name, node)[1:]

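    # A sketch of the traversal below: _getancestors walks the file's section
    # of the pack linearly (bounded by the section size from _findsection),
    # keeping a ``pending`` set seeded with the requested node. Each record
    # whose node is pending is yielded, and its non-null parents that are not
    # already known are added to ``pending``. Records in a section are stored
    # newest-first (see the mutablehistorypack docstring), so ancestors appear
    # after their descendants and a single pass suffices.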
    def _getancestors(self, name, node, known=None):
        if known is None:
            known = set()
        section = self._findsection(name)
        filename, offset, size, nodeindexoffset, nodeindexsize = section
        pending = set((node,))
        o = 0
        while o < size:
            if not pending:
                break
            entry, copyfrom = self._readentry(offset + o)
            o += PACKENTRYLENGTH
            if copyfrom:
                o += len(copyfrom)

            ancnode = entry[ANC_NODE]
            if ancnode in pending:
                pending.remove(ancnode)
                p1node = entry[ANC_P1NODE]
                p2node = entry[ANC_P2NODE]
                if p1node != nullid and p1node not in known:
                    pending.add(p1node)
                if p2node != nullid and p2node not in known:
                    pending.add(p2node)

                yield (ancnode, p1node, p2node, entry[ANC_LINKNODE], copyfrom)

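    # _readentry reads a single revision record at ``offset`` from the mmapped
    # pack data: the fixed 82-byte PACKFORMAT portion, followed (when the
    # copyfrom length field is nonzero) by the variable-length copyfrom path.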
    def _readentry(self, offset):
        data = self._data
        entry = struct.unpack(
            PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
        )
        copyfrom = None
        copyfromlen = entry[ANC_COPYFROM]
        if copyfromlen != 0:
            offset += PACKENTRYLENGTH
            copyfrom = data[offset : offset + copyfromlen]
        return entry, copyfrom

    def add(self, filename, node, p1, p2, linknode, copyfrom):
        raise RuntimeError(
            b"cannot add to historypack (%s:%s)" % (filename, hex(node))
        )

    def _findnode(self, name, node):
        if self.VERSION == 0:
            ancestors = self._getancestors(name, node)
            for ancnode, p1node, p2node, linknode, copyfrom in ancestors:
                if ancnode == node:
                    return (ancnode, p1node, p2node, linknode, copyfrom)
        else:
            section = self._findsection(name)
            nodeindexoffset, nodeindexsize = section[3:]
            entry = self._bisect(
                node,
                nodeindexoffset,
                nodeindexoffset + nodeindexsize,
                NODEINDEXENTRYLENGTH,
            )
            if entry is not None:
                node, offset = struct.unpack(NODEINDEXFORMAT, entry)
                entry, copyfrom = self._readentry(offset)
                # Drop the copyfromlen from the end of entry, and replace it
                # with the copyfrom string.
                return entry[:4] + (copyfrom,)

        raise KeyError(b"unable to find history for %s:%s" % (name, hex(node)))

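    # _findsection locates the file section for ``name``: the fanout table
    # narrows the search range, _bisect finds the file index entry by filename
    # hash, and the filename recorded in both the node index region and the
    # pack data is then checked against ``name`` to guard against collisions.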
    def _findsection(self, name):
        params = self.params
        namehash = hashlib.sha1(name).digest()
        fanoutkey = struct.unpack(
            params.fanoutstruct, namehash[: params.fanoutprefix]
        )[0]
        fanout = self._fanouttable

        start = fanout[fanoutkey] + params.indexstart
        indexend = self._indexend

        for i in pycompat.xrange(fanoutkey + 1, params.fanoutcount):
            end = fanout[i] + params.indexstart
            if end != start:
                break
        else:
            end = indexend

        entry = self._bisect(namehash, start, end, self.INDEXENTRYLENGTH)
        if not entry:
            raise KeyError(name)

        rawentry = struct.unpack(self.INDEXFORMAT, entry)
        x, offset, size, nodeindexoffset, nodeindexsize = rawentry
        rawnamelen = self._index[
            nodeindexoffset : nodeindexoffset + constants.FILENAMESIZE
        ]
        actualnamelen = struct.unpack(b'!H', rawnamelen)[0]
        nodeindexoffset += constants.FILENAMESIZE
        actualname = self._index[
            nodeindexoffset : nodeindexoffset + actualnamelen
        ]
        if actualname != name:
            raise KeyError(
                b"found file name %s when looking for %s" % (actualname, name)
            )
        nodeindexoffset += actualnamelen

        filenamelength = struct.unpack(
            b'!H', self._data[offset : offset + constants.FILENAMESIZE]
        )[0]
        offset += constants.FILENAMESIZE

        actualname = self._data[offset : offset + filenamelength]
        offset += filenamelength

        if name != actualname:
            raise KeyError(
                b"found file name %s when looking for %s" % (actualname, name)
            )

        # Skip entry list size
        offset += ENTRYCOUNTSIZE

        nodelistoffset = offset
        nodelistsize = (
            size - constants.FILENAMESIZE - filenamelength - ENTRYCOUNTSIZE
        )
        return (
            name,
            nodelistoffset,
            nodelistsize,
            nodeindexoffset,
            nodeindexsize,
        )

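    # _bisect searches a region of the index made of fixed-size records that
    # are sorted by their leading 20-byte node; ``mid`` is snapped back to a
    # record boundary on each iteration so comparisons always start on a node.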
    def _bisect(self, node, start, end, entrylen):
        # Bisect between start and end to find node
        origstart = start
        startnode = self._index[start : start + NODELENGTH]
        endnode = self._index[end : end + NODELENGTH]

        if startnode == node:
            return self._index[start : start + entrylen]
        elif endnode == node:
            return self._index[end : end + entrylen]
        else:
            while start < end - entrylen:
                mid = start + (end - start) // 2
                mid = mid - ((mid - origstart) % entrylen)
                midnode = self._index[mid : mid + NODELENGTH]
                if midnode == node:
                    return self._index[mid : mid + entrylen]
                if node > midnode:
                    start = mid
                elif node < midnode:
                    end = mid
        return None

    def markledger(self, ledger, options=None):
        for filename, node in self:
            ledger.markhistoryentry(self, filename, node)

    def cleanup(self, ledger):
        entries = ledger.sources.get(self, [])
        allkeys = set(self)
        repackedkeys = set(
            (e.filename, e.node) for e in entries if e.historyrepacked
        )

        if len(allkeys - repackedkeys) == 0:
            if self.path not in ledger.created:
                util.unlinkpath(self.indexpath, ignoremissing=True)
                util.unlinkpath(self.packpath, ignoremissing=True)

    def __iter__(self):
        for f, n, x, x, x, x in self.iterentries():
            yield f, n

    def iterentries(self):
        # Start at 1 to skip the header
        offset = 1
        while offset < self.datasize:
            data = self._data
            # <2 byte len> + <filename>
            filenamelen = struct.unpack(
                b'!H', data[offset : offset + constants.FILENAMESIZE]
            )[0]
            offset += constants.FILENAMESIZE
            filename = data[offset : offset + filenamelen]
            offset += filenamelen

            revcount = struct.unpack(
                b'!I', data[offset : offset + ENTRYCOUNTSIZE]
            )[0]
            offset += ENTRYCOUNTSIZE

            for i in pycompat.xrange(revcount):
                entry = struct.unpack(
                    PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
                )
                offset += PACKENTRYLENGTH

                copyfrom = data[offset : offset + entry[ANC_COPYFROM]]
                offset += entry[ANC_COPYFROM]

                yield (
                    filename,
                    entry[ANC_NODE],
                    entry[ANC_P1NODE],
                    entry[ANC_P2NODE],
                    entry[ANC_LINKNODE],
                    copyfrom,
                )

                self._pagedin += PACKENTRYLENGTH

            # If we've read a lot of data from the mmap, free some memory.
            self.freememory()


class mutablehistorypack(basepack.mutablebasepack):
    """A class for constructing and serializing a histpack file and index.

    A history pack is a pair of files that contain the revision history for
    various file revisions in Mercurial. It contains only revision history (like
    parent pointers and linknodes), not any revision content information.

    It consists of two files, with the following format:

    .histpack
        The pack itself is a series of file revisions with some basic header
        information on each.

        datapack = <version: 1 byte>
                   [<filesection>,...]
        filesection = <filename len: 2 byte unsigned int>
                      <filename>
                      <revision count: 4 byte unsigned int>
                      [<revision>,...]
        revision = <node: 20 byte>
                   <p1node: 20 byte>
                   <p2node: 20 byte>
                   <linknode: 20 byte>
                   <copyfromlen: 2 byte>
                   <copyfrom>

        The revisions within each filesection are stored in topological order
        (newest first). If a given entry has a parent from another file (a copy)
        then p1node is the node from the other file, and copyfrom is the
        filepath of the other file.

    .histidx
        The index file provides a mapping from filename to the file section in
        the histpack. In V1 it also contains sub-indexes for specific nodes
        within each file. It consists of three parts, the fanout, the file index
        and the node indexes.

        The file index is a list of index entries, sorted by filename hash (one
        per file section in the pack). Each entry has:

        - node (The 20 byte hash of the filename)
        - pack entry offset (The location of this file section in the histpack)
        - pack content size (The on-disk length of this file section's pack
                             data)
        - node index offset (The location of the file's node index in the index
                             file) [1]
        - node index size (the on-disk length of this file's node index) [1]

        The fanout is a quick lookup table to reduce the number of steps for
        bisecting the index. It is a series of 4 byte pointers to positions
        within the index. It has 2^16 entries, which corresponds to hash
        prefixes [00, 01, 02,..., FD, FE, FF]. Example: the pointer in slot 4F
        points to the index position of the first revision whose node starts
        with 4F. This saves log(2^16) bisect steps.

        dataidx = <fanouttable>
                  <file count: 8 byte unsigned> [1]
                  <fileindex>
                  <node count: 8 byte unsigned> [1]
                  [<nodeindex>,...] [1]
        fanouttable = [<index offset: 4 byte unsigned int>,...] (2^16 entries)

        fileindex = [<file index entry>,...]
        fileindexentry = <node: 20 byte>
                         <pack file section offset: 8 byte unsigned int>
                         <pack file section size: 8 byte unsigned int>
                         <node index offset: 4 byte unsigned int> [1]
                         <node index size: 4 byte unsigned int> [1]
        nodeindex = <filename>[<node index entry>,...] [1]
        filename = <filename len : 2 byte unsigned int><filename value> [1]
        nodeindexentry = <node: 20 byte> [1]
                         <pack file node offset: 8 byte unsigned int> [1]

    [1]: new in version 1.
    """


    INDEXSUFFIX = INDEXSUFFIX
    PACKSUFFIX = PACKSUFFIX

    SUPPORTED_VERSIONS = [2]

    def __init__(self, ui, packpath, version=2):
        super(mutablehistorypack, self).__init__(ui, packpath, version=version)
        self.files = {}
        self.entrylocations = {}
        self.fileentries = {}

        self.INDEXFORMAT = INDEXFORMAT2
        self.INDEXENTRYLENGTH = INDEXENTRYLENGTH2

        self.NODEINDEXFORMAT = NODEINDEXFORMAT
        self.NODEINDEXENTRYLENGTH = NODEINDEXENTRYLENGTH

    def add(self, filename, node, p1, p2, linknode, copyfrom):
        copyfrom = copyfrom or b''
        copyfromlen = struct.pack(b'!H', len(copyfrom))
        self.fileentries.setdefault(filename, []).append(
            (node, p1, p2, linknode, copyfromlen, copyfrom)
        )

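    # _write emits one file section per filename (filenames sorted for
    # determinism): a header of <filename len><filename><revision count>
    # followed by the revision records in the newest-first order produced by
    # shallowutil.sortnodes, recording each record's absolute pack offset in
    # self.entrylocations for later use by createindex.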
    def _write(self):
        for filename in sorted(self.fileentries):
            entries = self.fileentries[filename]
            sectionstart = self.packfp.tell()

            # Write the file section content
            entrymap = dict((e[0], e) for e in entries)

            def parentfunc(node):
                x, p1, p2, x, x, x = entrymap[node]
                parents = []
                if p1 != nullid:
                    parents.append(p1)
                if p2 != nullid:
                    parents.append(p2)
                return parents

            sortednodes = list(
                reversed(
                    shallowutil.sortnodes((e[0] for e in entries), parentfunc)
                )
            )

            # Write the file section header
            self.writeraw(
                b"%s%s%s"
                % (
                    struct.pack(b'!H', len(filename)),
                    filename,
                    struct.pack(b'!I', len(sortednodes)),
                )
            )

            sectionlen = constants.FILENAMESIZE + len(filename) + 4

            rawstrings = []

            # Record the node locations for the index
            locations = self.entrylocations.setdefault(filename, {})
            offset = sectionstart + sectionlen
            for node in sortednodes:
                locations[node] = offset
                raw = b'%s%s%s%s%s%s' % entrymap[node]
                rawstrings.append(raw)
                offset += len(raw)

            rawdata = b''.join(rawstrings)
            sectionlen += len(rawdata)

            self.writeraw(rawdata)

            # Record metadata for the index
            self.files[filename] = (sectionstart, sectionlen)
            node = hashlib.sha1(filename).digest()
            self.entries[node] = node

    def close(self, ledger=None):
        if self._closed:
            return

        self._write()

        return super(mutablehistorypack, self).close(ledger=ledger)

    def createindex(self, nodelocations, indexoffset):
        fileindexformat = self.INDEXFORMAT
        fileindexlength = self.INDEXENTRYLENGTH
        nodeindexformat = self.NODEINDEXFORMAT
        nodeindexlength = self.NODEINDEXENTRYLENGTH

        files = (
            (hashlib.sha1(filename).digest(), filename, offset, size)
            for filename, (offset, size) in pycompat.iteritems(self.files)
        )
        files = sorted(files)

        # node index is after file index size, file index, and node index size
        indexlensize = struct.calcsize(b'!Q')
        nodeindexoffset = (
            indexoffset
            + indexlensize
            + (len(files) * fileindexlength)
            + indexlensize
        )

        fileindexentries = []
        nodeindexentries = []
        nodecount = 0
        for namehash, filename, offset, size in files:
            # File section index
            nodelocations = self.entrylocations[filename]

            nodeindexsize = len(nodelocations) * nodeindexlength

            rawentry = struct.pack(
                fileindexformat,
                namehash,
                offset,
                size,
                nodeindexoffset,
                nodeindexsize,
            )

            # Node index
            nodeindexentries.append(
                struct.pack(constants.FILENAMESTRUCT, len(filename)) + filename
            )
            nodeindexoffset += constants.FILENAMESIZE + len(filename)

            for node, location in sorted(pycompat.iteritems(nodelocations)):
                nodeindexentries.append(
                    struct.pack(nodeindexformat, node, location)
                )
                nodecount += 1

            nodeindexoffset += len(nodelocations) * nodeindexlength

            fileindexentries.append(rawentry)

        nodecountraw = struct.pack(b'!Q', nodecount)
        return (
            b''.join(fileindexentries)
            + nodecountraw
            + b''.join(nodeindexentries)
        )
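
    # The bytes returned above are the file index entries, then the 8-byte
    # node count, then the per-file node indexes. The fanout table and the
    # leading file-count field that precede them in the .histidx layout are
    # presumably written by the mutablebasepack machinery in basepack.py,
    # which is not shown in this file.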