obsolete: fix error message at marker creation...
Pierre-Yves David
r17117:217bfb10 default
@@ -1,279 +1,279 @@
# obsolete.py - obsolete markers handling
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

9 """Obsolete markers handling
9 """Obsolete markers handling
10
10
11 An obsolete marker maps an old changeset to a list of new
11 An obsolete marker maps an old changeset to a list of new
12 changesets. If the list of new changesets is empty, the old changeset
12 changesets. If the list of new changesets is empty, the old changeset
13 is said to be "killed". Otherwise, the old changeset is being
13 is said to be "killed". Otherwise, the old changeset is being
14 "replaced" by the new changesets.
14 "replaced" by the new changesets.
15
15
16 Obsolete markers can be used to record and distribute changeset graph
16 Obsolete markers can be used to record and distribute changeset graph
17 transformations performed by history rewriting operations, and help
17 transformations performed by history rewriting operations, and help
18 building new tools to reconciliate conflicting rewriting actions. To
18 building new tools to reconciliate conflicting rewriting actions. To
19 facilitate conflicts resolution, markers include various annotations
19 facilitate conflicts resolution, markers include various annotations
20 besides old and news changeset identifiers, such as creation date or
20 besides old and news changeset identifiers, such as creation date or
21 author name.
21 author name.
22
22
23
23
24 Format
24 Format
25 ------
25 ------
26
26
27 Markers are stored in an append-only file stored in
27 Markers are stored in an append-only file stored in
28 '.hg/store/obsstore'.
28 '.hg/store/obsstore'.
29
29
30 The file starts with a version header:
30 The file starts with a version header:
31
31
32 - 1 unsigned byte: version number, starting at zero.
32 - 1 unsigned byte: version number, starting at zero.
33
33
34
34
35 The header is followed by the markers. Each marker is made of:
35 The header is followed by the markers. Each marker is made of:
36
36
37 - 1 unsigned byte: number of new changesets "R", could be zero.
37 - 1 unsigned byte: number of new changesets "R", could be zero.
38
38
39 - 1 unsigned 32-bits integer: metadata size "M" in bytes.
39 - 1 unsigned 32-bits integer: metadata size "M" in bytes.
40
40
41 - 1 byte: a bit field. It is reserved for flags used in obsolete
41 - 1 byte: a bit field. It is reserved for flags used in obsolete
42 markers common operations, to avoid repeated decoding of metadata
42 markers common operations, to avoid repeated decoding of metadata
43 entries.
43 entries.
44
44
45 - 20 bytes: obsoleted changeset identifier.
45 - 20 bytes: obsoleted changeset identifier.
46
46
47 - N*20 bytes: new changesets identifiers.
47 - N*20 bytes: new changesets identifiers.
48
48
49 - M bytes: metadata as a sequence of nul-terminated strings. Each
49 - M bytes: metadata as a sequence of nul-terminated strings. Each
50 string contains a key and a value, separated by a color ':', without
50 string contains a key and a value, separated by a color ':', without
51 additional encoding. Keys cannot contain '\0' or ':' and values
51 additional encoding. Keys cannot contain '\0' or ':' and values
52 cannot contain '\0'.
52 cannot contain '\0'.
53 """
53 """
import struct
from mercurial import util, base85
from i18n import _

_pack = struct.pack
_unpack = struct.unpack



# data used for parsing and writing
_fmversion = 0
_fmfixed = '>BIB20s'
_fmnode = '20s'
_fmfsize = struct.calcsize(_fmfixed)
_fnodesize = struct.calcsize(_fmnode)

def _readmarkers(data):
    """Read and enumerate markers from raw data"""
    off = 0
    diskversion = _unpack('>B', data[off:off + 1])[0]
    off += 1
    if diskversion != _fmversion:
        raise util.Abort(_('parsing obsolete marker: unknown version %r')
                         % diskversion)

    # Loop on markers
    l = len(data)
    while off + _fmfsize <= l:
        # read fixed part
        cur = data[off:off + _fmfsize]
        off += _fmfsize
        nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
        # read replacement
        sucs = ()
        if nbsuc:
            s = (_fnodesize * nbsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fmnode * nbsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise util.Abort(_('parsing obsolete marker: metadata is too '
                               'short, %d bytes expected, got %d')
                             % (mdsize, len(metadata)))
        off += mdsize
        yield (pre, sucs, flags, metadata)

def encodemeta(meta):
    """Return encoded metadata string to string mapping.

    Assume no ':' in key and no '\0' in both key and value."""
    for key, value in meta.iteritems():
        if ':' in key or '\0' in key:
            raise ValueError("':' and '\0' are forbidden in metadata key")
        if '\0' in value:
            raise ValueError("'\0' is forbidden in metadata value")
    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])

def decodemeta(data):
    """Return string to string dictionary from encoded version."""
    d = {}
    for l in data.split('\0'):
        if l:
            key, value = l.split(':', 1)
            d[key] = value
    return d

class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later versions
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changeset node identifiers"""
        return self._data[1]

    def metadata(self):
        """Decoded metadata dictionary"""
        if self._decodedmeta is None:
            self._decodedmeta = decodemeta(self._data[3])
        return self._decodedmeta

    def date(self):
        """Creation date as (unixtime, offset)"""
        parts = self.metadata()['date'].split(' ')
        return (float(parts[0]), int(parts[1]))

class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with two mappings:
    - precursors: old -> set(new)
    - successors: new -> set(old)
    """

    def __init__(self):
        self._all = []
        # new markers to serialize
        self._new = []
        self.precursors = {}
        self.successors = {}

    def __iter__(self):
        return iter(self._all)

    def __nonzero__(self):
        return bool(self._all)

    def create(self, prec, succs=(), flag=0, metadata=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata
        """
        if metadata is None:
            metadata = {}
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
-                raise ValueError(prec)
+                raise ValueError(succ)
        marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
        self.add(marker)

    def add(self, marker):
        """Add a new marker to the store

        This marker still needs to be written to disk"""
        self._new.append(marker)
        self._load(marker)

    def loadmarkers(self, data):
        """Load all markers in data, mark them as known."""
        for marker in _readmarkers(data):
            self._load(marker)

    def mergemarkers(self, data):
        other = set(_readmarkers(data))
        local = set(self._all)
        new = other - local
        for marker in new:
            self.add(marker)

    def flushmarkers(self, stream):
        """Write all markers to a stream

        After this operation, "new" markers are considered "known"."""
        self._writemarkers(stream)
        self._new[:] = []

    def _load(self, marker):
        self._all.append(marker)
        pre, sucs = marker[:2]
        self.precursors.setdefault(pre, set()).add(marker)
        for suc in sucs:
            self.successors.setdefault(suc, set()).add(marker)

    def _writemarkers(self, stream=None):
        # Kept separate from flushmarkers(), it will be reused for
        # markers exchange.
        if stream is None:
            final = []
            w = final.append
        else:
            w = stream.write
        w(_pack('>B', _fmversion))
        for marker in self._all:
            pre, sucs, flags, metadata = marker
            nbsuc = len(sucs)
            format = _fmfixed + (_fmnode * nbsuc)
            data = [nbsuc, len(metadata), flags, pre]
            data.extend(sucs)
            w(_pack(format, *data))
            w(metadata)
        if stream is None:
            return ''.join(final)

def listmarkers(repo):
    """List markers over pushkey"""
    if not repo.obsstore:
        return {}
    data = repo.obsstore._writemarkers()
    return {'dump': base85.b85encode(data)}

def pushmarker(repo, key, old, new):
    """Push markers over pushkey"""
    if key != 'dump':
        repo.ui.warn(_('unknown key: %r') % key)
        return 0
    if old:
        repo.ui.warn(_('unexpected old value'))
        return 0
    data = base85.b85decode(new)
    lock = repo.lock()
    try:
        repo.obsstore.mergemarkers(data)
        return 1
    finally:
        lock.release()

def allmarkers(repo):
    """all obsolete markers known in a repository"""
    for markerdata in repo.obsstore:
        yield marker(repo, markerdata)

def precursormarkers(ctx):
    """obsolete markers making this changeset obsolete"""
    for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
        yield marker(ctx._repo, data)

def successormarkers(ctx):
    """obsolete markers marking this changeset as a successor"""
    for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
        yield marker(ctx._repo, data)

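The Format section of the module docstring maps directly onto the struct calls in _readmarkers and _writemarkers. As a rough illustration only (not part of obsolete.py, using made-up placeholder node values), a single version-0 marker could be packed by hand and parsed back like this:

    import struct

    prec = '\x11' * 20                    # obsoleted changeset id (placeholder)
    succ = '\x22' * 20                    # one successor id (placeholder)
    metadata = 'date:0 0\0user:test'      # nul-separated key:value pairs

    # fixed part: R (successor count), M (metadata size), flags, precursor
    record = struct.pack('>BIB20s', 1, len(metadata), 0, prec)
    record += struct.pack('20s', succ)    # R * 20 bytes of successor ids
    record += metadata                    # M bytes of metadata

    data = struct.pack('>B', 0) + record  # prepend the version header
    # list(_readmarkers(data)) == [(prec, (succ,), 0, metadata)]
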
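Similarly, obsstore.create above just validates the node lengths, encodes the metadata with encodemeta, and indexes the resulting tuple. A minimal in-memory sketch (again illustrative, with placeholder nodes) of creating a marker and reading it back through the precursors index:

    store = obsstore()
    old, new = 'a' * 20, 'b' * 20         # placeholder 20-byte node ids
    store.create(old, [new], metadata={'date': '0 0', 'user': 'test'})

    m = list(store.precursors[old])[0]    # the marker obsoleting `old`
    # m == (old, (new,), 0, 'date:0 0\x00user:test')
    decodemeta(m[3])                      # -> {'date': '0 0', 'user': 'test'}
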
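Finally, listmarkers and pushmarker exchange the same binary dump over pushkey, base85-encoded. The round trip can be sketched without a real repository (illustrative only; `remote` and `local` stand in for the two repositories' obsstores):

    remote = obsstore()
    remote.create('c' * 20, ['d' * 20])   # placeholder nodes

    dump = remote._writemarkers()         # binary dump, version header included
    encoded = base85.b85encode(dump)      # the 'dump' value listmarkers() publishes

    local = obsstore()
    local.mergemarkers(base85.b85decode(encoded))
    # `local` now holds the same marker and will serialize it on the next flush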