obsolete: helper class to access obsolete marker data
Pierre-Yves David <pierre-yves.david@ens-lyon.org>
r17072:517af63b default
@@ -1,192 +1,220 @@
# obsolete.py - obsolete markers handling
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Obsolete markers handling

An obsolete marker maps an old changeset to a list of new
changesets. If the list of new changesets is empty, the old changeset
is said to be "killed". Otherwise, the old changeset is said to be
"replaced" by the new changesets.

Obsolete markers can be used to record and distribute changeset graph
transformations performed by history rewriting operations, and help
build new tools to reconcile conflicting rewriting actions. To
facilitate conflict resolution, markers include various annotations
besides the old and new changeset identifiers, such as creation date or
author name.


Format
------

Markers are stored in an append-only file in '.hg/store/obsstore'.

The file starts with a version header:

- 1 unsigned byte: version number, starting at zero.


The header is followed by the markers. Each marker is made of:

- 1 unsigned byte: number of new changesets "N", can be zero.

- 1 unsigned 32-bit integer: metadata size "M" in bytes.

- 1 byte: a bit field. It is reserved for flags used in common
  obsolete marker operations, to avoid repeated decoding of metadata
  entries.

- 20 bytes: obsoleted changeset identifier.

- N*20 bytes: new changeset identifiers.

- M bytes: metadata as a sequence of nul-terminated strings. Each
  string contains a key and a value, separated by a colon ':', without
  additional encoding. Keys cannot contain '\0' or ':' and values
  cannot contain '\0'.
"""
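For illustration, a blob following the layout above could be packed by hand as in the sketch below. This is not part of the module; the sample identifiers and the 'user:alice' entry are made up, and Python 2 byte strings are assumed, matching the rest of the file.

import struct

prec = 'a' * 20                     # obsoleted changeset identifier (20 bytes)
succ = 'b' * 20                     # single new changeset identifier
meta = 'user:alice'                 # one key:value metadata entry

blob = struct.pack('>B', 0)         # version header
blob += struct.pack('>BIB20s20s',   # fixed part plus one successor node
                    1,              # N: number of new changesets
                    len(meta),      # M: metadata size in bytes
                    0,              # flags bit field
                    prec, succ)
blob += meta                        # metadata payload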
import struct
from mercurial import util
from i18n import _

_pack = struct.pack
_unpack = struct.unpack



# data used for parsing and writing
_fmversion = 0
_fmfixed = '>BIB20s'
_fmnode = '20s'
_fmfsize = struct.calcsize(_fmfixed)
_fnodesize = struct.calcsize(_fmnode)

def _readmarkers(data):
    """Read and enumerate markers from raw data"""
    off = 0
    diskversion = _unpack('>B', data[off:off + 1])[0]
    off += 1
    if diskversion != _fmversion:
        raise util.Abort(_('parsing obsolete marker: unknown version %r')
                         % diskversion)

    # Loop on markers
    l = len(data)
    while off + _fmfsize <= l:
        # read fixed part
        cur = data[off:off + _fmfsize]
        off += _fmfsize
        nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
        # read replacement
        sucs = ()
        if nbsuc:
            s = (_fnodesize * nbsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fmnode * nbsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise util.Abort(_('parsing obsolete marker: metadata is too '
                               'short, %d bytes expected, got %d')
                             % (mdsize, len(metadata)))
        off += mdsize
        yield (pre, sucs, flags, metadata)
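As a quick sketch of the parser in use (hypothetical data, Python 2 assumed): a blob holding one marker with no successors and no metadata decodes to a single tuple.

raw = _pack('>B', _fmversion)               # version header
raw += _pack(_fmfixed, 0, 0, 0, 'x' * 20)   # one marker: no successors, no metadata
markers = list(_readmarkers(raw))           # -> [('xxxxxxxxxxxxxxxxxxxx', (), 0, '')]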

def encodemeta(meta):
    """Return an encoded version of a string-to-string metadata mapping.

    Assume no ':' in keys and no '\0' in either keys or values."""
    for key, value in meta.iteritems():
        if ':' in key or '\0' in key:
            raise ValueError("':' and '\0' are forbidden in metadata keys")
        if '\0' in value:
            raise ValueError("'\0' is forbidden in metadata values")
    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])

def decodemeta(data):
    """Return a string-to-string dictionary from its encoded version."""
    d = {}
    for l in data.split('\0'):
        if l:
            key, value = l.split(':', 1)
            d[key] = value
    return d
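A short usage sketch (the sample keys are arbitrary): encodemeta and decodemeta round-trip a plain dictionary.

meta = {'user': 'alice', 'date': '1339000000 0'}
blob = encodemeta(meta)          # 'date:1339000000 0\x00user:alice'
assert decodemeta(blob) == meta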

class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in a later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changeset node identifiers"""
        return self._data[1]

    def metadata(self):
        """Decoded metadata dictionary"""
        if self._decodedmeta is None:
            self._decodedmeta = decodemeta(self._data[3])
        return self._decodedmeta

    def date(self):
        """Creation date as (unixtime, offset)"""
        parts = self.metadata()['date'].split(' ')
        return (float(parts[0]), int(parts[1]))

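A small sketch of the new wrapper in use. The raw tuple below stands in for one entry yielded by _readmarkers, and None is passed where a real repository object would normally go, since the current accessors do not use it yet.

raw = ('a' * 20, ('b' * 20,), 0, encodemeta({'date': '1339000000 0'}))
m = marker(None, raw)
m.precnode()               # -> 'aaaaaaaaaaaaaaaaaaaa'
m.succnodes()              # -> ('bbbbbbbbbbbbbbbbbbbb',)
m.date()                   # -> (1339000000.0, 0)
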
class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with two mappings:
    - precursors: old node -> set(markers it is the precursor of)
    - successors: new node -> set(markers it is a successor of)
    """

    def __init__(self):
        self._all = []
        # new markers to serialize
        self._new = []
        self.precursors = {}
        self.successors = {}

    def create(self, prec, succs=(), flag=0, metadata=None):
        """Add a new obsolete marker to the store

        * ensure it is hashable
        * check mandatory metadata
        * encode metadata
        """
        if metadata is None:
            metadata = {}
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
        self.add(marker)

    def add(self, marker):
        """Add a new marker to the store

        This marker still needs to be written to disk"""
        self._new.append(marker)
        self._load(marker)

    def loadmarkers(self, data):
        """Load all markers in data, mark them as known."""
        for marker in _readmarkers(data):
            self._load(marker)

    def flushmarkers(self, stream):
        """Write all markers to a stream

        After this operation, "new" markers are considered "known"."""
        self._writemarkers(stream)
        self._new[:] = []

    def _load(self, marker):
        self._all.append(marker)
        pre, sucs = marker[:2]
        self.precursors.setdefault(pre, set()).add(marker)
        for suc in sucs:
            self.successors.setdefault(suc, set()).add(marker)

    def _writemarkers(self, stream):
        # Kept separate from flushmarkers(); it will be reused for
        # marker exchange.
        stream.write(_pack('>B', _fmversion))
        for marker in self._all:
            pre, sucs, flags, metadata = marker
            nbsuc = len(sucs)
            format = _fmfixed + (_fmnode * nbsuc)
            data = [nbsuc, len(metadata), flags, pre]
            data.extend(sucs)
            stream.write(_pack(format, *data))
            stream.write(metadata)
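Finally, a sketch of a full in-memory round trip through the store: create a marker, flush it to a file-like stream, then load the same bytes into a second store. StringIO stands in for the real '.hg/store/obsstore' file, the node values are made-up 20-byte strings, and Python 2 is assumed as in the rest of the module.

from StringIO import StringIO

store = obsstore()
store.create('a' * 20, ['b' * 20], metadata={'user': 'alice'})

buf = StringIO()
store.flushmarkers(buf)            # writes the version header plus the new marker

other = obsstore()
other.loadmarkers(buf.getvalue())  # parses the same bytes back
assert other.precursors.keys() == ['a' * 20]
assert ('b' * 20) in other.successors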