obsolete: os.SEEK_END first appeared in Python 2.5...
Adrian Buehlmann -
r17200:19f5dec2 default
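The change below replaces os.SEEK_END with its literal value 2: the os.SEEK_* constants only appeared in Python 2.5, while the numeric whence values are accepted by file.seek() on every version (and Mercurial presumably still supported Python 2.4 at the time). A minimal sketch of the portable idiom, not taken from the changeset itself; the file name is hypothetical:

    # Portable "seek to end, then tell" without os.SEEK_END (Python 2.4 safe).
    # 'somefile' is a hypothetical path used only for illustration.
    f = open('somefile', 'ab')
    try:
        # Append mode leaves the initial position implementation defined,
        # so seek explicitly; whence=2 means "relative to the end of file"
        # (the value that Python 2.5 later named os.SEEK_END).
        f.seek(0, 2)
        offset = f.tell()   # size of the file before appending
    finally:
        f.close()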
@@ -1,287 +1,287 @@
# obsolete.py - obsolete markers handling
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Obsolete markers handling

An obsolete marker maps an old changeset to a list of new
changesets. If the list of new changesets is empty, the old changeset
is said to be "killed". Otherwise, the old changeset is being
"replaced" by the new changesets.

Obsolete markers can be used to record and distribute changeset graph
transformations performed by history rewriting operations, and help
build new tools to reconcile conflicting rewriting actions. To
facilitate conflict resolution, markers include various annotations
besides the old and new changeset identifiers, such as creation date or
author name.


Format
------

Markers are stored in an append-only file in '.hg/store/obsstore'.

The file starts with a version header:

- 1 unsigned byte: version number, starting at zero.


The header is followed by the markers. Each marker is made of:

- 1 unsigned byte: number of new changesets "N", can be zero.

- 1 unsigned 32-bit integer: metadata size "M" in bytes.

- 1 byte: a bit field. It is reserved for flags used in common
  obsolete marker operations, to avoid repeated decoding of metadata
  entries.

- 20 bytes: obsoleted changeset identifier.

- N*20 bytes: new changeset identifiers.

- M bytes: metadata as a sequence of nul-terminated strings. Each
  string contains a key and a value, separated by a colon ':', without
  additional encoding. Keys cannot contain '\0' or ':' and values
  cannot contain '\0'.
"""
-import os, struct
+import struct
from mercurial import util, base85
from i18n import _

_pack = struct.pack
_unpack = struct.unpack



# data used for parsing and writing
_fmversion = 0
_fmfixed = '>BIB20s'
_fmnode = '20s'
_fmfsize = struct.calcsize(_fmfixed)
_fnodesize = struct.calcsize(_fmnode)

def _readmarkers(data):
    """Read and enumerate markers from raw data"""
    off = 0
    diskversion = _unpack('>B', data[off:off + 1])[0]
    off += 1
    if diskversion != _fmversion:
        raise util.Abort(_('parsing obsolete marker: unknown version %r')
                         % diskversion)

    # Loop on markers
    l = len(data)
    while off + _fmfsize <= l:
        # read fixed part
        cur = data[off:off + _fmfsize]
        off += _fmfsize
        nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
        # read replacement
        sucs = ()
        if nbsuc:
            s = (_fnodesize * nbsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fmnode * nbsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise util.Abort(_('parsing obsolete marker: metadata is too '
                               'short, %d bytes expected, got %d')
                             % (mdsize, len(metadata)))
        off += mdsize
        yield (pre, sucs, flags, metadata)

def encodemeta(meta):
    """Return encoded metadata string to string mapping.

    Assume no ':' in key and no '\0' in both key and value."""
    for key, value in meta.iteritems():
        if ':' in key or '\0' in key:
            raise ValueError("':' and '\0' are forbidden in metadata keys")
        if '\0' in value:
            raise ValueError("'\0' is forbidden in metadata values")
    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])

def decodemeta(data):
    """Return string to string dictionary from encoded version."""
    d = {}
    for l in data.split('\0'):
        if l:
            key, value = l.split(':', 1)
            d[key] = value
    return d

class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changesets node identifiers"""
        return self._data[1]

    def metadata(self):
        """Decoded metadata dictionary"""
        if self._decodedmeta is None:
            self._decodedmeta = decodemeta(self._data[3])
        return self._decodedmeta

    def date(self):
        """Creation date as (unixtime, offset)"""
        parts = self.metadata()['date'].split(' ')
        return (float(parts[0]), int(parts[1]))

class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with two mappings:
    - precursors: old -> set(new)
    - successors: new -> set(old)
    """

    def __init__(self, sopener):
        self._all = []
        # new markers to serialize
        self.precursors = {}
        self.successors = {}
        self.sopener = sopener
        data = sopener.tryread('obsstore')
        if data:
            for marker in _readmarkers(data):
                self._load(marker)

    def __iter__(self):
        return iter(self._all)

    def __nonzero__(self):
        return bool(self._all)

    def create(self, transaction, prec, succs=(), flag=0, metadata=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata
        """
        if metadata is None:
            metadata = {}
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
        self.add(transaction, marker)

    def add(self, transaction, marker):
        """Add a new marker to the store"""
        if marker not in self._all:
            f = self.sopener('obsstore', 'ab')
            try:
                # Whether the file's current position is at the beginning or
                # at the end after opening a file for appending is
                # implementation defined. So we must seek to the end before
                # calling tell(), or we may get a zero offset for non-zero
                # sized files on some platforms (issue3543).
-                f.seek(0, os.SEEK_END)
+                f.seek(0, 2) # os.SEEK_END
                offset = f.tell()
                transaction.add('obsstore', offset)
                if offset == 0:
                    # new file: add the version header
                    f.write(_pack('>B', _fmversion))
                _writemarkers(f.write, [marker])
            finally:
                # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                # call 'filecacheentry.refresh()' here
                f.close()
            self._load(marker)

    def mergemarkers(self, transaction, data):
        other = _readmarkers(data)
        local = set(self._all)
        new = [m for m in other if m not in local]
        for marker in new:
            # XXX: N markers == N x (open, write, close)
            # we should write them all at once
            self.add(transaction, marker)

    def _load(self, marker):
        self._all.append(marker)
        pre, sucs = marker[:2]
        self.precursors.setdefault(pre, set()).add(marker)
        for suc in sucs:
            self.successors.setdefault(suc, set()).add(marker)

def _writemarkers(write, markers):
    # Kept separate from flushmarkers(), it will be reused for
    # markers exchange.
    for marker in markers:
        pre, sucs, flags, metadata = marker
        nbsuc = len(sucs)
        format = _fmfixed + (_fmnode * nbsuc)
        data = [nbsuc, len(metadata), flags, pre]
        data.extend(sucs)
        write(_pack(format, *data))
        write(metadata)

def listmarkers(repo):
    """List markers over pushkey"""
    if not repo.obsstore:
        return {}
    data = [_pack('>B', _fmversion)]
    _writemarkers(data.append, repo.obsstore)
    return {'dump': base85.b85encode(''.join(data))}

def pushmarker(repo, key, old, new):
    """Push markers over pushkey"""
    if key != 'dump':
        repo.ui.warn(_('unknown key: %r') % key)
        return 0
    if old:
        repo.ui.warn(_('unexpected old value'))
        return 0
    data = base85.b85decode(new)
    lock = repo.lock()
    try:
        tr = repo.transaction('pushkey: obsolete markers')
        try:
            repo.obsstore.mergemarkers(tr, data)
            tr.close()
            return 1
        finally:
            tr.release()
    finally:
        lock.release()

def allmarkers(repo):
    """all obsolete markers known in a repository"""
    for markerdata in repo.obsstore:
        yield marker(repo, markerdata)

def precursormarkers(ctx):
    """obsolete markers making this changeset obsolete"""
    for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
        yield marker(ctx._repo, data)

def successormarkers(ctx):
    """obsolete markers marking this changeset as a successor"""
    for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
        yield marker(ctx._repo, data)
