obsolete: add easy way to iterate over obsolete marker object
Pierre-Yves David <pierre-yves.david@ens-lyon.org>
r17073:3a79a568 default
@@ -1,220 +1,231 @@
 # obsolete.py - obsolete markers handling
 #
 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
 #                Logilab SA        <contact@logilab.fr>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 """Obsolete markers handling
 
 An obsolete marker maps an old changeset to a list of new
 changesets. If the list of new changesets is empty, the old changeset
 is said to be "killed". Otherwise, the old changeset is being
 "replaced" by the new changesets.
 
 Obsolete markers can be used to record and distribute changeset graph
 transformations performed by history rewriting operations, and help
 build new tools to reconcile conflicting rewriting actions. To
 facilitate conflict resolution, markers include various annotations
 besides old and new changeset identifiers, such as creation date or
 author name.
 
 
 Format
 ------
 
 Markers are recorded in an append-only file stored in
 '.hg/store/obsstore'.
 
 The file starts with a version header:
 
 - 1 unsigned byte: version number, starting at zero.
 
 
 The header is followed by the markers. Each marker is made of:
 
 - 1 unsigned byte: number of new changesets "N", which can be zero.
 
 - 1 unsigned 32-bit integer: metadata size "M" in bytes.
 
 - 1 byte: a bit field. It is reserved for flags used in common
   obsolete marker operations, to avoid repeated decoding of metadata
   entries.
 
 - 20 bytes: obsoleted changeset identifier.
 
 - N*20 bytes: new changeset identifiers.
 
 - M bytes: metadata as a sequence of nul-terminated strings. Each
   string contains a key and a value, separated by a colon ':', without
   additional encoding. Keys cannot contain '\0' or ':' and values
   cannot contain '\0'.
 """
 import struct
 from mercurial import util
 from i18n import _
 
 _pack = struct.pack
 _unpack = struct.unpack
 
 
 
 # data used for parsing and writing
 _fmversion = 0
 _fmfixed = '>BIB20s'
 _fmnode = '20s'
 _fmfsize = struct.calcsize(_fmfixed)
 _fnodesize = struct.calcsize(_fmnode)
 
 def _readmarkers(data):
     """Read and enumerate markers from raw data"""
     off = 0
     diskversion = _unpack('>B', data[off:off + 1])[0]
     off += 1
     if diskversion != _fmversion:
         raise util.Abort(_('parsing obsolete marker: unknown version %r')
                          % diskversion)
 
     # Loop on markers
     l = len(data)
     while off + _fmfsize <= l:
         # read fixed part
         cur = data[off:off + _fmfsize]
         off += _fmfsize
         nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
         # read replacement
         sucs = ()
         if nbsuc:
             s = (_fnodesize * nbsuc)
             cur = data[off:off + s]
             sucs = _unpack(_fmnode * nbsuc, cur)
             off += s
         # read metadata
         # (metadata will be decoded on demand)
         metadata = data[off:off + mdsize]
         if len(metadata) != mdsize:
             raise util.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
         off += mdsize
         yield (pre, sucs, flags, metadata)
 
 def encodemeta(meta):
     """Return the encoded version of a string-to-string metadata mapping.
 
     Assume no ':' in keys and no '\0' in either keys or values."""
     for key, value in meta.iteritems():
         if ':' in key or '\0' in key:
             raise ValueError("':' and '\0' are forbidden in metadata keys")
         if '\0' in value:
             raise ValueError("'\0' is forbidden in metadata values")
     return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
 
 def decodemeta(data):
     """Return a string-to-string dictionary from its encoded version."""
     d = {}
     for l in data.split('\0'):
         if l:
             key, value = l.split(':')
             d[key] = value
     return d
 
 class marker(object):
     """Wrap obsolete marker raw data"""
 
     def __init__(self, repo, data):
         # the repo argument will be used to create changectx in later versions
         self._repo = repo
         self._data = data
         self._decodedmeta = None
 
     def precnode(self):
         """Precursor changeset node identifier"""
         return self._data[0]
 
     def succnodes(self):
         """List of successor changeset node identifiers"""
         return self._data[1]
 
     def metadata(self):
         """Decoded metadata dictionary"""
         if self._decodedmeta is None:
             self._decodedmeta = decodemeta(self._data[3])
         return self._decodedmeta
 
     def date(self):
         """Creation date as (unixtime, offset)"""
         parts = self.metadata()['date'].split(' ')
         return (float(parts[0]), int(parts[1]))
 
 class obsstore(object):
     """Store obsolete markers
 
     Markers can be accessed with two mappings:
     - precursors: old -> set(new)
     - successors: new -> set(old)
     """
 
     def __init__(self):
         self._all = []
         # new markers to serialize
         self._new = []
         self.precursors = {}
         self.successors = {}
 
+    def __iter__(self):
+        return iter(self._all)
+
     def create(self, prec, succs=(), flag=0, metadata=None):
         """Add a new obsolete marker
 
         * ensure it is hashable
         * check mandatory metadata
         * encode metadata
         """
         if metadata is None:
             metadata = {}
         if len(prec) != 20:
             raise ValueError(prec)
         for succ in succs:
             if len(succ) != 20:
                 raise ValueError(succ)
         marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
         self.add(marker)
 
     def add(self, marker):
         """Add a new marker to the store
 
         This marker still needs to be written to disk"""
         self._new.append(marker)
         self._load(marker)
 
     def loadmarkers(self, data):
         """Load all markers in data, mark them as known."""
         for marker in _readmarkers(data):
             self._load(marker)
 
     def flushmarkers(self, stream):
         """Write all markers to a stream
 
         After this operation, "new" markers are considered "known"."""
         self._writemarkers(stream)
         self._new[:] = []
 
     def _load(self, marker):
         self._all.append(marker)
         pre, sucs = marker[:2]
         self.precursors.setdefault(pre, set()).add(marker)
         for suc in sucs:
             self.successors.setdefault(suc, set()).add(marker)
 
     def _writemarkers(self, stream):
         # Kept separate from flushmarkers(), it will be reused for
         # markers exchange.
         stream.write(_pack('>B', _fmversion))
         for marker in self._all:
             pre, sucs, flags, metadata = marker
             nbsuc = len(sucs)
             format = _fmfixed + (_fmnode * nbsuc)
             data = [nbsuc, len(metadata), flags, pre]
             data.extend(sucs)
             stream.write(_pack(format, *data))
             stream.write(metadata)
+
+
+
+def allmarkers(repo):
+    """All obsolete markers known in a repository"""
+    for markerdata in repo.obsstore:
+        yield marker(repo, markerdata)
+
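The point of this change is the new __iter__ on obsstore plus the module-level allmarkers() helper. A minimal usage sketch, assuming the module is importable as mercurial.obsolete under Python 2 (which the iteritems() call above implies); the 20-byte node ids and the fakerepo stand-in are made up for illustration, since allmarkers() only needs an object exposing an obsstore attribute:

    from mercurial import obsolete

    class fakerepo(object):
        # hypothetical stand-in: allmarkers() only reads repo.obsstore
        pass

    repo = fakerepo()
    repo.obsstore = obsolete.obsstore()
    repo.obsstore.create('\x01' * 20, ('\x02' * 20,), metadata={'user': 'alice'})

    for m in obsolete.allmarkers(repo):
        assert m.precnode() == '\x01' * 20        # obsoleted changeset id
        assert m.succnodes() == ('\x02' * 20,)    # successors; empty means "killed"
        assert m.metadata() == {'user': 'alice'}  # decoded on demand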
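A round trip through the store shows the same iteration at the raw-tuple level. This is only a sketch under the same Python 2 assumption, using cStringIO as a stand-in for the obsstore file:

    from cStringIO import StringIO
    from mercurial import obsolete

    store = obsolete.obsstore()
    store.create('\x01' * 20, ('\x02' * 20,), flag=0, metadata={'user': 'alice'})

    out = StringIO()
    store.flushmarkers(out)               # version byte followed by every marker

    restored = obsolete.obsstore()
    restored.loadmarkers(out.getvalue())  # parses the binary format back

    for pre, sucs, flags, meta in restored:   # the new __iter__ yields raw tuples
        assert obsolete.decodemeta(meta) == {'user': 'alice'}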
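For reference, the byte layout described in the "Format" section of the docstring can be reproduced by hand with struct. The node values below are made up, and _readmarkers() is an internal helper, so this is an illustration of the format rather than a supported API:

    import struct
    from mercurial import obsolete

    prec = '\x01' * 20                          # obsoleted changeset id (made up)
    succ = '\x02' * 20                          # successor changeset id (made up)
    meta = obsolete.encodemeta({'user': 'alice'})

    blob = (struct.pack('>B', 0)                # version header
            + struct.pack('>BIB20s' + '20s',    # fixed part plus N node fields
                          1,                    # N: number of new changesets
                          len(meta),            # M: metadata size in bytes
                          0,                    # flags bit field
                          prec, succ)
            + meta)                             # M bytes of 'key:value' strings

    assert list(obsolete._readmarkers(blob)) == [(prec, (succ,), 0, meta)]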