# pvec.py - probabilistic vector clocks for Mercurial
#
# Copyright 2012 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''
A "pvec" is a changeset property based on the theory of vector clocks
that can be compared to discover relatedness without consulting a
graph. This can be useful for tasks like determining how a
disconnected patch relates to a repository.

Currently a pvec consists of 448 bits, of which 24 are 'depth' and the
remainder are a bit vector. It is represented as a 70-character base85
string.

Construction:

- a root changeset has a depth of 0 and a bit vector based on its hash
- a normal commit has a pvec where depth is increased by one and
  one bit vector bit is flipped based on its hash
- a merge changeset pvec is constructed by copying changes from one pvec into
  the other to balance its depth

Properties:

- for linear changes, difference in depth is always <= hamming distance
- otherwise, changes are probably divergent
- when hamming distance is < 200, we can reliably detect when pvecs are near

Issues:

- hamming distance ceases to work over distances of ~ 200
- detecting divergence is less accurate when the common ancestor is very close
  to either revision or total distance is high
- this could probably be improved by modeling the relation between
  delta and hdist

Uses:

- a patch pvec can be used to locate the nearest available common ancestor for
  resolving conflicts
- ordering of patches can be established without a DAG
- two head pvecs can be compared to determine whether push/pull/merge is needed
  and approximately how many changesets are involved
- can be used to find a heuristic divergence measure between changesets on
  different branches
'''
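
# A small worked example of the comparison rule above (illustrative only):
# let dd be the depth difference between two pvecs and hd the hamming
# distance of their bit vectors.  Each non-merge commit adds one to depth and
# flips roughly one bit, so:
#   - hd <= dd: the deeper pvec is probably a descendant of the shallower
#     one, about dd changesets away
#   - hd > dd: the pvecs have probably diverged; their common ancestor is
#     roughly (hd + dd) / 2 changesets back from the deeper one and
#     (hd - dd) / 2 back from the shallower one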

from __future__ import absolute_import

from .node import nullrev
from . import (
    pycompat,
    util,
)

_size = 448  # 70 chars b85-encoded
_bytes = _size // 8
_depthbits = 24
_depthbytes = _depthbits // 8
_vecbytes = _bytes - _depthbytes
_vecbits = _vecbytes * 8
_radius = (_vecbits - 30) // 2  # high probability vectors are related
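
# For reference, the derived constants work out to _bytes = 56,
# _depthbytes = 3, _vecbytes = 53, _vecbits = 424 and _radius = 197;
# _radius is used by pvec.near() below to decide whether two pvecs are close.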


def _bin(bs):
    '''convert a bytestring to a long'''
    v = 0
    # iterate as integer byte values so this works on Python 2 and 3 alike
    for b in bytearray(bs):
        v = v * 256 + b
    return v


def _str(v, l):
    # convert a long back into a big-endian bytestring of l bytes
    bs = b""
    for p in pycompat.xrange(l):
        bs = pycompat.bytechr(v & 255) + bs
        v >>= 8
    return bs


def _split(b):
    '''depth and bitvec'''
    return _bin(b[:_depthbytes]), _bin(b[_depthbytes:])


def _join(depth, bitvec):
    return _str(depth, _depthbytes) + _str(bitvec, _vecbytes)
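
# Consistency note: _split and _join are inverses for in-range values, i.e.
# _split(_join(depth, bitvec)) == (depth, bitvec) whenever depth fits in
# _depthbytes bytes and bitvec fits in _vecbytes bytes.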


def _hweight(x):
    # population count: number of set bits in x
    c = 0
    while x:
        if x & 1:
            c += 1
        x >>= 1
    return c


# precomputed hamming weight of every byte value
_htab = [_hweight(x) for x in pycompat.xrange(256)]


def _hamming(a, b):
    '''find the hamming distance between two longs'''
    d = a ^ b
    c = 0
    while d:
        c += _htab[d & 0xFF]
        d >>= 8
    return c
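
# e.g. _hamming(0b1010, 0b0110) == 2, since the operands differ in exactly
# two bit positions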


def _mergevec(x, y, c):
    # Ideally, this function would be x ^ y ^ ancestor, but finding
    # ancestors is a nuisance. So instead we find the minimal number
    # of changes to balance the depth and hamming distance
    d1, v1 = x
    d2, v2 = y
    if d1 < d2:
        d1, d2, v1, v2 = d2, d1, v2, v1
    hdist = _hamming(v1, v2)
    ddist = d1 - d2
    v = v1
    m = v1 ^ v2  # mask of different bits
    i = 1
    if hdist > ddist:
        # if delta = 10 and hdist = 100, then we need to go up 55 steps
        # to the ancestor and down 45
        changes = (hdist - ddist + 1) // 2
    else:
        # must make at least one change
        changes = 1
    depth = d1 + changes

    # copy changes from v2
    if m:
        while changes:
            if m & i:
                v ^= i
                changes -= 1
            i <<= 1
    else:
        v = _flipbit(v, c)

    return depth, v
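
# Worked example of the merge arithmetic above (illustrative only): merging
# pvecs of depth 30 and 20 whose vectors differ in 100 bits gives ddist = 10,
# hdist = 100 and changes = (100 - 10 + 1) // 2 = 45, so the result has
# depth 75 and copies 45 of the differing bits from the shallower vector.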


def _flipbit(v, node):
    # converting bit strings to longs is slow
    bit = (hash(node) & 0xFFFFFFFF) % _vecbits
    return v ^ (1 << bit)


def ctxpvec(ctx):
    '''construct a pvec for ctx while filling in the cache'''
    r = ctx.repo()
    if not util.safehasattr(r, "_pveccache"):
        r._pveccache = {}
    pvc = r._pveccache
    if ctx.rev() not in pvc:
        cl = r.changelog
        for n in pycompat.xrange(ctx.rev() + 1):
            if n not in pvc:
                node = cl.node(n)
                p1, p2 = cl.parentrevs(n)
                if p1 == nullrev:
                    # start with a 'random' vector at root
                    pvc[n] = (0, _bin((node * 3)[:_vecbytes]))
                elif p2 == nullrev:
                    d, v = pvc[p1]
                    pvc[n] = (d + 1, _flipbit(v, node))
                else:
                    pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
    bs = _join(*pvc[ctx.rev()])
    return pvec(util.b85encode(bs))
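
# Hypothetical usage sketch (repository and revision names assumed for
# illustration): given a localrepo object `repo`, something like
#
#   pv1 = ctxpvec(repo[b'tip'])
#   pv2 = ctxpvec(repo[b'default'])
#   if pv1 | pv2:
#       ...  # probably divergent, a merge or pull may be needed
#
# compares two heads without walking the revision graph.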


class pvec(object):
    def __init__(self, hashorctx):
        if isinstance(hashorctx, bytes):
            # a base85-encoded pvec string
            self._bs = hashorctx
            self._depth, self._vec = _split(util.b85decode(hashorctx))
        else:
            # a changectx: build its pvec and copy the fields over
            other = ctxpvec(hashorctx)
            self._bs = other._bs
            self._depth, self._vec = other._depth, other._vec

    def __str__(self):
        return self._bs

    def __eq__(self, b):
        return self._vec == b._vec and self._depth == b._depth

    def __lt__(self, b):
        # is self probably an ancestor of b?
        delta = b._depth - self._depth
        if delta < 0:
            return False  # always correct
        if _hamming(self._vec, b._vec) > delta:
            return False
        return True

    def __gt__(self, b):
        return b < self

    def __or__(self, b):
        # have self and b probably diverged?
        delta = abs(b._depth - self._depth)
        if _hamming(self._vec, b._vec) <= delta:
            return False
        return True

    def __sub__(self, b):
        # how many changesets self is ahead of its ancestor b
        if self | b:
            raise ValueError(b"concurrent pvecs")
        return self._depth - b._depth

    def distance(self, b):
        # estimated number of changesets separating self and b
        d = abs(b._depth - self._depth)
        h = _hamming(self._vec, b._vec)
        return max(d, h)

    def near(self, b):
        # are self and b within _radius of each other?
        dist = abs(b._depth - self._depth)
        if dist > _radius or _hamming(self._vec, b._vec) > _radius:
            return False