obsolete: use absolute_import
Gregory Szorc
r27332:04f346b8 default
@@ -1,1263 +1,1273 b''
# obsolete.py - obsolete markers handling
#
# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
#                Logilab SA        <contact@logilab.fr>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Obsolete marker handling

An obsolete marker maps an old changeset to a list of new
changesets. If the list of new changesets is empty, the old changeset
is said to be "killed". Otherwise, the old changeset is being
"replaced" by the new changesets.

Obsolete markers can be used to record and distribute changeset graph
transformations performed by history rewrite operations, and help
build new tools to reconcile conflicting rewrite actions. To
facilitate conflict resolution, markers include various annotations
besides old and new changeset identifiers, such as creation date or
author name.

The old obsoleted changeset is called a "precursor" and possible
replacements are called "successors". Markers that used changeset X as
a precursor are called "successor markers of X" because they hold
information about the successors of X. Markers that use changeset Y as
a successor are called "precursor markers of Y" because they hold
information about the precursors of Y.

Examples:

- When changeset A is replaced by changeset A', one marker is stored:

    (A, (A',))

- When changesets A and B are folded into a new changeset C, two markers are
  stored:

    (A, (C,)) and (B, (C,))

- When changeset A is simply "pruned" from the graph, a marker is created:

    (A, ())

- When changeset A is split into B and C, a single marker is used:

    (A, (B, C))

  We use a single marker to distinguish the "split" case from the "divergence"
  case. If two independent operations rewrite the same changeset A into A' and
  A'', we have an error case: divergent rewriting. We can detect it because
  two markers will be created independently:

  (A, (B,)) and (A, (C,))

Format
------

Markers are stored in an append-only file stored in
'.hg/store/obsstore'.

The file starts with a version header:

- 1 unsigned byte: version number, starting at zero.

The header is followed by the markers. Marker format depends on the version.
See the comment associated with each format for details.

"""
-import errno, struct
-import util, base85, node, parsers, error
-import phases
-from i18n import _
+from __future__ import absolute_import
+
+import errno
+import struct
+
+from .i18n import _
+from . import (
+    base85,
+    error,
+    node,
+    parsers,
+    phases,
+    util,
+)

_pack = struct.pack
_unpack = struct.unpack
_calcsize = struct.calcsize
propertycache = util.propertycache

# the obsolete feature is not mature enough to be enabled by default.
# you have to rely on a third party extension to enable this.
_enabled = False

# Options for obsolescence
createmarkersopt = 'createmarkers'
allowunstableopt = 'allowunstable'
exchangeopt = 'exchange'

### obsolescence marker flag

## bumpedfix flag
#
# When a changeset A' succeeds a changeset A which became public, we call A'
# "bumped" because it's a successor of a public changeset
#
# o A' (bumped)
# |`:
# | o A
# |/
# o Z
#
# The way to solve this situation is to create a new changeset Ad as a child
# of A. This changeset has the same content as A'. So the diff from A to A'
# is the same as the diff from A to Ad. Ad is marked as a successor of A'
#
# o Ad
# |`:
# | x A'
# |'|
# o | A
# |/
# o Z
#
# But by transitivity Ad is also a successor of A. To avoid having Ad marked
# as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
# This flag means that the successor expresses the changes between the public
# and bumped version and fixes the situation, breaking the transitivity of
# "bumped" here.
bumpedfix = 1
usingsha256 = 2

## Parsing and writing of version "0"
#
# The header is followed by the markers. Each marker is made of:
#
# - 1 uint8 : number of new changesets "N", can be zero.
#
# - 1 uint32: metadata size "M" in bytes.
#
# - 1 byte: a bit field. It is reserved for flags used in common
#   obsolete marker operations, to avoid repeated decoding of metadata
#   entries.
#
# - 20 bytes: obsoleted changeset identifier.
#
# - N*20 bytes: new changesets identifiers.
#
# - M bytes: metadata as a sequence of nul-terminated strings. Each
#   string contains a key and a value, separated by a colon ':', without
#   additional encoding. Keys cannot contain '\0' or ':' and values
#   cannot contain '\0'.
_fm0version = 0
_fm0fixed = '>BIB20s'
_fm0node = '20s'
_fm0fsize = _calcsize(_fm0fixed)
_fm0fnodesize = _calcsize(_fm0node)

def _fm0readmarkers(data, off):
    # Loop on markers
    l = len(data)
    while off + _fm0fsize <= l:
        # read fixed part
        cur = data[off:off + _fm0fsize]
        off += _fm0fsize
        numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
        # read replacement
        sucs = ()
        if numsuc:
            s = (_fm0fnodesize * numsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fm0node * numsuc, cur)
            off += s
        # read metadata
        # (metadata will be decoded on demand)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise error.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
        off += mdsize
        metadata = _fm0decodemeta(metadata)
        try:
            when, offset = metadata.pop('date', '0 0').split(' ')
            date = float(when), int(offset)
        except ValueError:
            date = (0., 0)
        parents = None
        if 'p2' in metadata:
            parents = (metadata.pop('p1', None), metadata.pop('p2', None))
        elif 'p1' in metadata:
            parents = (metadata.pop('p1', None),)
        elif 'p0' in metadata:
            parents = ()
        if parents is not None:
            try:
                parents = tuple(node.bin(p) for p in parents)
                # if parent content is not a nodeid, drop the data
                for p in parents:
                    if len(p) != 20:
                        parents = None
                        break
            except TypeError:
                # if content cannot be translated to nodeid drop the data.
                parents = None

        metadata = tuple(sorted(metadata.iteritems()))

        yield (pre, sucs, flags, metadata, date, parents)

def _fm0encodeonemarker(marker):
    pre, sucs, flags, metadata, date, parents = marker
    if flags & usingsha256:
        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
    metadata = dict(metadata)
    time, tz = date
    metadata['date'] = '%r %i' % (time, tz)
    if parents is not None:
        if not parents:
            # mark that we explicitly recorded no parents
            metadata['p0'] = ''
        for i, p in enumerate(parents):
            metadata['p%i' % (i + 1)] = node.hex(p)
    metadata = _fm0encodemeta(metadata)
    numsuc = len(sucs)
    format = _fm0fixed + (_fm0node * numsuc)
    data = [numsuc, len(metadata), flags, pre]
    data.extend(sucs)
    return _pack(format, *data) + metadata

def _fm0encodemeta(meta):
    """Return encoded metadata string to string mapping.

    Assume no ':' in key and no '\0' in either key or value."""
    for key, value in meta.iteritems():
        if ':' in key or '\0' in key:
            raise ValueError("':' and '\0' are forbidden in metadata keys")
        if '\0' in value:
            raise ValueError("'\0' is forbidden in metadata values")
    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])

def _fm0decodemeta(data):
    """Return string to string dictionary from encoded version."""
    d = {}
    for l in data.split('\0'):
        if l:
            key, value = l.split(':')
            d[key] = value
    return d
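
# Hypothetical helper, not part of the original module: a minimal sketch of
# the version-0 layout described above, packing one marker by hand with the
# same struct formats and reading it back. All node values are fabricated.
def _fm0example():
    """Illustrate the version-0 marker layout (sketch only)."""
    prec = '\x11' * 20                        # fabricated precursor node
    succ = '\x22' * 20                        # fabricated successor node
    meta = _fm0encodemeta({'user': 'alice'})
    # fixed part: number of successors, metadata size, flags, precursor
    raw = _pack(_fm0fixed, 1, len(meta), 0, prec)
    # then the successor nodes, then the raw metadata blob
    raw += _pack(_fm0node, succ) + meta
    # yields (prec, sucs, flags, metadata, date, parents) tuples
    return list(_fm0readmarkers(raw, 0))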

## Parsing and writing of version "1"
#
# The header is followed by the markers. Each marker is made of:
#
# - uint32: total size of the marker (including this field)
#
# - float64: date in seconds since epoch
#
# - int16: timezone offset in minutes
#
# - uint16: a bit field. It is reserved for flags used in common
#   obsolete marker operations, to avoid repeated decoding of metadata
#   entries.
#
# - uint8: number of successors "N", can be zero.
#
# - uint8: number of parents "P", can be zero.
#
#     0: parents data stored but no parent,
#     1: one parent stored,
#     2: two parents stored,
#     3: no parent data stored
#
# - uint8: number of metadata entries M
#
# - 20 or 32 bytes: precursor changeset identifier.
#
# - N*(20 or 32) bytes: successors changesets identifiers.
#
# - P*(20 or 32) bytes: parents of the precursors changesets.
#
# - M*(uint8, uint8): size of all metadata entries (key and value)
#
# - remaining bytes: the metadata, each (key, value) pair after the other.
_fm1version = 1
_fm1fixed = '>IdhHBBB20s'
_fm1nodesha1 = '20s'
_fm1nodesha256 = '32s'
_fm1nodesha1size = _calcsize(_fm1nodesha1)
_fm1nodesha256size = _calcsize(_fm1nodesha256)
_fm1fsize = _calcsize(_fm1fixed)
_fm1parentnone = 3
_fm1parentshift = 14
_fm1parentmask = (_fm1parentnone << _fm1parentshift)
_fm1metapair = 'BB'
_fm1metapairsize = _calcsize('BB')

def _fm1purereadmarkers(data, off):
    # make some global constants local for performance
    noneflag = _fm1parentnone
    sha2flag = usingsha256
    sha1size = _fm1nodesha1size
    sha2size = _fm1nodesha256size
    sha1fmt = _fm1nodesha1
    sha2fmt = _fm1nodesha256
    metasize = _fm1metapairsize
    metafmt = _fm1metapair
    fsize = _fm1fsize
    unpack = _unpack

    # Loop on markers
    stop = len(data) - _fm1fsize
    ufixed = struct.Struct(_fm1fixed).unpack

    while off <= stop:
        # read fixed part
        o1 = off + fsize
        t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])

        if flags & sha2flag:
            # FIXME: prec was read as a SHA1, needs to be amended

            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha2size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha2size * numsuc
                sucs = unpack(sha2fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha2size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha2size * numpar
                parents = unpack(sha2fmt * numpar, data[o2:o3])
        else:
            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha1size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha1size * numsuc
                sucs = unpack(sha1fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha1size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha1size * numpar
                parents = unpack(sha1fmt * numpar, data[o2:o3])

        # read metadata
        off = o3 + metasize * nummeta
        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
        metadata = []
        for idx in xrange(0, len(metapairsize), 2):
            o1 = off + metapairsize[idx]
            o2 = o1 + metapairsize[idx + 1]
            metadata.append((data[off:o1], data[o1:o2]))
            off = o2

        yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)

def _fm1encodeonemarker(marker):
    pre, sucs, flags, metadata, date, parents = marker
    # determine node size
    _fm1node = _fm1nodesha1
    if flags & usingsha256:
        _fm1node = _fm1nodesha256
    numsuc = len(sucs)
    numextranodes = numsuc
    if parents is None:
        numpar = _fm1parentnone
    else:
        numpar = len(parents)
        numextranodes += numpar
    formatnodes = _fm1node * numextranodes
    formatmeta = _fm1metapair * len(metadata)
    format = _fm1fixed + formatnodes + formatmeta
    # tz is stored in minutes so we divide by 60
    tz = date[1]//60
    data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
    data.extend(sucs)
    if parents is not None:
        data.extend(parents)
    totalsize = _calcsize(format)
    for key, value in metadata:
        lk = len(key)
        lv = len(value)
        data.append(lk)
        data.append(lv)
        totalsize += lk + lv
    data[0] = totalsize
    data = [_pack(format, *data)]
    for key, value in metadata:
        data.append(key)
        data.append(value)
    return ''.join(data)
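
# Hypothetical helper, not part of the original module: a sketch pairing
# _fm1encodeonemarker() with _fm1purereadmarkers() to round-trip a single
# version-1 marker. The tuple follows the field order documented on the
# obsstore class below (prec, succs, flag, meta, date, parents); all values
# are fabricated.
def _fm1example():
    """Round-trip one version-1 marker (sketch only)."""
    mark = ('\x11' * 20,            # precursor node
            ('\x22' * 20,),         # one successor node
            0,                      # flags
            (('user', 'alice'),),   # metadata as sorted (key, value) pairs
            (0.0, 0),               # date: (unixtime, tz offset)
            None)                   # parents of the precursor not recorded
    raw = _fm1encodeonemarker(mark)
    return list(_fm1purereadmarkers(raw, 0))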

def _fm1readmarkers(data, off):
    native = getattr(parsers, 'fm1readmarkers', None)
    if not native:
        return _fm1purereadmarkers(data, off)
    stop = len(data) - _fm1fsize
    return native(data, off, stop)

# mapping to read/write various marker formats
# <version> -> (decoder, encoder)
formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
           _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}

@util.nogc
def _readmarkers(data):
    """Read and enumerate markers from raw data"""
    off = 0
    diskversion = _unpack('>B', data[off:off + 1])[0]
    off += 1
    if diskversion not in formats:
        raise error.Abort(_('parsing obsolete marker: unknown version %r')
                          % diskversion)
    return diskversion, formats[diskversion][0](data, off)

def encodemarkers(markers, addheader=False, version=_fm0version):
    # Kept separate from flushmarkers(), it will be reused for
    # markers exchange.
    encodeone = formats[version][1]
    if addheader:
        yield _pack('>B', version)
    for marker in markers:
        yield encodeone(marker)
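
# Hypothetical usage sketch, not part of the original module: serializing
# markers with a version header via encodemarkers() and parsing the bytes
# back with _readmarkers(). The marker tuple is fabricated.
def _streamexample():
    """Encode then decode a one-marker stream (sketch only)."""
    mark = ('\x11' * 20, ('\x22' * 20,), 0, (), (0.0, 0), None)
    data = ''.join(encodemarkers([mark], addheader=True,
                                 version=_fm1version))
    diskversion, markers = _readmarkers(data)
    return diskversion, list(markers)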


class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in a later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def __hash__(self):
        return hash(self._data)

    def __eq__(self, other):
        if type(other) != type(self):
            return False
        return self._data == other._data

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changesets node identifiers"""
        return self._data[1]

    def parentnodes(self):
        """Parents of the precursors (None if not recorded)"""
        return self._data[5]

    def metadata(self):
        """Decoded metadata dictionary"""
        return dict(self._data[3])

    def date(self):
        """Creation date as (unixtime, offset)"""
        return self._data[4]

    def flags(self):
        """The flags field of the marker"""
        return self._data[2]

@util.nogc
def _addsuccessors(successors, markers):
    for mark in markers:
        successors.setdefault(mark[0], set()).add(mark)

@util.nogc
def _addprecursors(precursors, markers):
    for mark in markers:
        for suc in mark[1]:
            precursors.setdefault(suc, set()).add(mark)

@util.nogc
def _addchildren(children, markers):
    for mark in markers:
        parents = mark[5]
        if parents is not None:
            for p in parents:
                children.setdefault(p, set()).add(mark)

def _checkinvalidmarkers(markers):
    """search for markers with invalid data and raise an error if needed

    Exists as a separate function to allow the evolve extension to provide
    more subtle handling.
    """
    for mark in markers:
        if node.nullid in mark[1]:
            raise error.Abort(_('bad obsolescence marker detected: '
                                'invalid successors nullid'))

class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with several mappings:
    - precursors[x] -> set(markers on precursors edges of x)
    - successors[x] -> set(markers on successors edges of x)
    - children[x] -> set(markers on precursors edges of children(x))
    """

    fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
    # prec: nodeid, precursor changesets
    # succs: tuple of nodeid, successor changesets (0-N length)
    # flag: integer, flag field carrying modifier for the markers (see doc)
    # meta: binary blob, encoded metadata dictionary
    # date: (float, int) tuple, date of marker creation
    # parents: (tuple of nodeid) or None, parents of precursors
    #          None is used when no data has been recorded

    def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
        # caches for various obsolescence related data
        self.caches = {}
        self.svfs = svfs
        self._version = defaultformat
        self._readonly = readonly

    def __iter__(self):
        return iter(self._all)

    def __len__(self):
        return len(self._all)

    def __nonzero__(self):
        if not self._cached('_all'):
            try:
                return self.svfs.stat('obsstore').st_size > 1
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise
                # just build an empty _all list if no obsstore exists, which
                # avoids further stat() syscalls
                pass
        return bool(self._all)

    @property
    def readonly(self):
        """True if marker creation is disabled

        Remove me in the future when obsolete markers are always on."""
        return self._readonly

    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code creating markers you want to use the
        `createmarkers` function in this module instead.

        Return True if a new marker has been added, False if the marker
        already existed (no op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = util.parsedate(metadata.pop('date'))
            else:
                date = util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        metadata = tuple(sorted(metadata.iteritems()))

        marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))

    def add(self, transaction, markers):
        """Add new markers to the store

        Take care of filtering duplicates.
        Return the number of new markers."""
        if self._readonly:
            raise error.Abort('creating obsolete markers is not enabled on '
                              'this repo')
        known = set(self._all)
        new = []
        for m in markers:
            if m not in known:
                known.add(m)
                new.append(m)
        if new:
            f = self.svfs('obsstore', 'ab')
            try:
                offset = f.tell()
                transaction.add('obsstore', offset)
                # offset == 0: new file - add the version header
                for bytes in encodemarkers(new, offset == 0, self._version):
                    f.write(bytes)
            finally:
                # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                # call 'filecacheentry.refresh()' here
                f.close()
            self._addmarkers(new)
            # new markers *may* have changed several sets. invalidate the
            # cache.
            self.caches.clear()
        # records the number of new markers for the transaction hooks
        previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
        transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
        return len(new)

    def mergemarkers(self, transaction, data):
        """merge a binary stream of markers inside the obsstore

        Returns the number of new markers added."""
        version, markers = _readmarkers(data)
        return self.add(transaction, markers)

    @propertycache
    def _all(self):
        data = self.svfs.tryread('obsstore')
        if not data:
            return []
        self._version, markers = _readmarkers(data)
        markers = list(markers)
        _checkinvalidmarkers(markers)
        return markers

    @propertycache
    def successors(self):
        successors = {}
        _addsuccessors(successors, self._all)
        return successors

    @propertycache
    def precursors(self):
        precursors = {}
        _addprecursors(precursors, self._all)
        return precursors

    @propertycache
    def children(self):
        children = {}
        _addchildren(children, self._all)
        return children

    def _cached(self, attr):
        return attr in self.__dict__

    def _addmarkers(self, markers):
        markers = list(markers) # to allow repeated iteration
        self._all.extend(markers)
        if self._cached('successors'):
            _addsuccessors(self.successors, markers)
        if self._cached('precursors'):
            _addprecursors(self.precursors, markers)
        if self._cached('children'):
            _addchildren(self.children, markers)
        _checkinvalidmarkers(markers)

    def relevantmarkers(self, nodes):
        """return a set of all obsolescence markers relevant to a set of nodes.

        "relevant" to a set of nodes means:

        - markers that use this changeset as a successor
        - prune markers of direct children of this changeset
        - recursive application of the two rules on precursors of these markers

        It is a set so you cannot rely on order."""

        pendingnodes = set(nodes)
        seenmarkers = set()
        seennodes = set(pendingnodes)
        precursorsmarkers = self.precursors
        children = self.children
        while pendingnodes:
            direct = set()
            for current in pendingnodes:
                direct.update(precursorsmarkers.get(current, ()))
                pruned = [m for m in children.get(current, ()) if not m[1]]
                direct.update(pruned)
            direct -= seenmarkers
            pendingnodes = set([m[0] for m in direct])
            seenmarkers |= direct
            pendingnodes -= seennodes
            seennodes |= pendingnodes
        return seenmarkers

def commonversion(versions):
    """Return the newest version listed in both versions and our local formats.

    Returns None if no common version exists.
    """
    versions.sort(reverse=True)
    # search for the highest version known on both sides
    for v in versions:
        if v in formats:
            return v
    return None
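
# Illustrative note, not in the original source: with only formats 0 and 1
# defined above, a peer advertising versions [2, 1] negotiates version 1,
# while a peer that only knows a hypothetical version 2 yields None:
#
#     commonversion([2, 1]) == 1
#     commonversion([2]) is None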

# arbitrarily picked to fit into the 8K limit from HTTP servers
# you have to take into account:
# - the version header
# - the base85 encoding
_maxpayload = 5300

def _pushkeyescape(markers):
    """encode markers into a dict suitable for pushkey exchange

    - binary data is base85 encoded
    - split in chunks smaller than 5300 bytes"""
    keys = {}
    parts = []
    currentlen = _maxpayload * 2  # ensure we create a new part
    for marker in markers:
        nextdata = _fm0encodeonemarker(marker)
        if (len(nextdata) + currentlen > _maxpayload):
            currentpart = []
            currentlen = 0
            parts.append(currentpart)
        currentpart.append(nextdata)
        currentlen += len(nextdata)
    for idx, part in enumerate(reversed(parts)):
        data = ''.join([_pack('>B', _fm0version)] + part)
        keys['dump%i' % idx] = base85.b85encode(data)
    return keys
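
# Hypothetical usage sketch, not in the original source: pushkey payloads
# come back base85-encoded under 'dump0', 'dump1', ... keys and decode with
# the version-0 reader. The marker tuple is fabricated.
#
#     mark = ('\x11' * 20, (), 0, (), (0.0, 0), None)
#     keys = _pushkeyescape([mark])
#     data = base85.b85decode(keys['dump0'])
#     diskversion, markers = _readmarkers(data)  # diskversion == _fm0version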

def listmarkers(repo):
    """List markers over pushkey"""
    if not repo.obsstore:
        return {}
    return _pushkeyescape(sorted(repo.obsstore))

def pushmarker(repo, key, old, new):
    """Push markers over pushkey"""
    if not key.startswith('dump'):
        repo.ui.warn(_('unknown key: %r') % key)
        return 0
    if old:
        repo.ui.warn(_('unexpected old value for %r') % key)
        return 0
    data = base85.b85decode(new)
    lock = repo.lock()
    try:
        tr = repo.transaction('pushkey: obsolete markers')
        try:
            repo.obsstore.mergemarkers(tr, data)
            tr.close()
            return 1
        finally:
            tr.release()
    finally:
        lock.release()

def getmarkers(repo, nodes=None):
    """returns markers known in a repository

    If <nodes> is specified, only markers "relevant" to those nodes are
    returned"""
    if nodes is None:
        rawmarkers = repo.obsstore
    else:
        rawmarkers = repo.obsstore.relevantmarkers(nodes)

    for markerdata in rawmarkers:
        yield marker(repo, markerdata)

def relevantmarkers(repo, node):
    """all obsolete markers relevant to some revision"""
    for markerdata in repo.obsstore.relevantmarkers(node):
        yield marker(repo, markerdata)


def precursormarkers(ctx):
    """obsolete markers marking this changeset as a successor"""
    for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
        yield marker(ctx.repo(), data)

def successormarkers(ctx):
    """obsolete markers making this changeset obsolete"""
    for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
        yield marker(ctx.repo(), data)

def allsuccessors(obsstore, nodes, ignoreflags=0):
    """Yield node for every successor of <nodes>.

    Some successors may be unknown locally.

    This is a linear yield unsuited to detecting split changesets. It includes
    initial nodes too."""
    remaining = set(nodes)
    seen = set(remaining)
    while remaining:
        current = remaining.pop()
        yield current
        for mark in obsstore.successors.get(current, ()):
            # ignore marker flagged with specified flag
            if mark[2] & ignoreflags:
                continue
            for suc in mark[1]:
                if suc not in seen:
                    seen.add(suc)
                    remaining.add(suc)

def allprecursors(obsstore, nodes, ignoreflags=0):
    """Yield node for every precursor of <nodes>.

    Some precursors may be unknown locally.

    This is a linear yield unsuited to detecting folded changesets. It includes
    initial nodes too."""

    remaining = set(nodes)
    seen = set(remaining)
    while remaining:
        current = remaining.pop()
        yield current
        for mark in obsstore.precursors.get(current, ()):
            # ignore marker flagged with specified flag
            if mark[2] & ignoreflags:
                continue
            suc = mark[0]
            if suc not in seen:
                seen.add(suc)
                remaining.add(suc)

def foreground(repo, nodes):
    """return all nodes in the "foreground" of other nodes

    The foreground of a revision is anything reachable using parent -> children
    or precursor -> successor relation. It is very similar to "descendant" but
    augmented with obsolescence information.

    Beware that possible obsolescence cycles may result in complex situations.
    """
    repo = repo.unfiltered()
    foreground = set(repo.set('%ln::', nodes))
    if repo.obsstore:
        # We only need this complicated logic if there is obsolescence
        # XXX will probably deserve an optimised revset.
        nm = repo.changelog.nodemap
        plen = -1
        # compute the whole set of successors or descendants
        while len(foreground) != plen:
            plen = len(foreground)
            succs = set(c.node() for c in foreground)
            mutable = [c.node() for c in foreground if c.mutable()]
            succs.update(allsuccessors(repo.obsstore, mutable))
            known = (n for n in succs if n in nm)
            foreground = set(repo.set('%ln::', known))
    return set(c.node() for c in foreground)


def successorssets(repo, initialnode, cache=None):
    """Return set of all latest successors of initial nodes

    The successors set of a changeset A is the group of revisions that succeed
    A. It succeeds A as a consistent whole, each revision being only a partial
    replacement. The successors set contains non-obsolete changesets only.

    This function returns the full list of successor sets which is why it
    returns a list of tuples and not just a single tuple. Each tuple is a valid
    successors set. Note that (A,) may be a valid successors set for changeset A
    (see below).

    In most cases, a changeset A will have a single element (e.g. the changeset
    A is replaced by A') in its successors set. Though, it is also common for a
    changeset A to have no elements in its successor set (e.g. the changeset
    has been pruned). Therefore, the returned list of successors sets will be
    [(A',)] or [], respectively.

    When a changeset A is split into A' and B', however, it will result in a
    successors set containing more than a single element, i.e. [(A',B')].
    Divergent changesets will result in multiple successors sets, i.e. [(A',),
    (A'')].

    If a changeset A is not obsolete, then it will conceptually have no
    successors set. To distinguish this from a pruned changeset, the successor
    set will contain itself only, i.e. [(A,)].

    Finally, successors unknown locally are considered to be pruned (obsoleted
    without any successors).

    The optional `cache` parameter is a dictionary that may contain precomputed
    successors sets. It is meant to reuse the computation of a previous call to
    `successorssets` when multiple calls are made at the same time. The cache
    dictionary is updated in place. The caller is responsible for its life
    span. Code that makes multiple calls to `successorssets` *must* use this
    cache mechanism or suffer terrible performance.
    """

    succmarkers = repo.obsstore.successors

    # Stack of nodes we search successors sets for
    toproceed = [initialnode]
    # set version of above list for fast loop detection
    # elements added to "toproceed" must be added here
    stackedset = set(toproceed)
    if cache is None:
        cache = {}

    # This while loop is the flattened version of a recursive search for
    # successors sets
    #
    # def successorssets(x):
    #     successors = directsuccessors(x)
    #     ss = [[]]
    #     for succ in successors:
    #         # product as in itertools cartesian product
    #         ss = product(ss, successorssets(succ))
    #     return ss
    #
    # But we can not use plain recursive calls here:
    # - that would blow the python call stack
    # - obsolescence markers may have cycles, we need to handle them.
    #
    # The `toproceed` list acts as our call stack. Every node we search
    # successors sets for is stacked there.
    #
    # The `stackedset` is the set version of this stack, used to check if a
    # node is already stacked. This check is used to detect cycles and prevent
    # infinite loops.
    #
    # The successors sets of all nodes are stored in the `cache` dictionary.
    #
    # After this while loop ends we use the cache to return the successors
    # sets for the node requested by the caller.
    while toproceed:
        # Every iteration tries to compute the successors sets of the topmost
        # node of the stack: CURRENT.
        #
        # There are four possible outcomes:
        #
        # 1) We already know the successors sets of CURRENT:
        #    -> mission accomplished, pop it from the stack.
        # 2) Node is not obsolete:
        #    -> the node is its own successors sets. Add it to the cache.
        # 3) We do not know the successors sets of direct successors of
        #    CURRENT:
        #    -> We add those successors to the stack.
        # 4) We know the successors sets of all direct successors of CURRENT:
        #    -> We can compute CURRENT's successors set and add it to the
        #       cache.
        #
        current = toproceed[-1]
        if current in cache:
            # case (1): We already know the successors sets
            stackedset.remove(toproceed.pop())
        elif current not in succmarkers:
            # case (2): The node is not obsolete.
            if current in repo:
                # We have a valid last successor.
                cache[current] = [(current,)]
            else:
                # Final obsolete version is unknown locally.
                # Do not count that as a valid successor.
                cache[current] = []
        else:
            # cases (3) and (4)
            #
            # We proceed in two phases. Phase 1 aims to distinguish case (3)
            # from case (4):
            #
            #     For each direct successor of CURRENT, we check whether its
            #     successors sets are known. If they are not, we stack the
            #     unknown node and proceed to the next iteration of the while
            #     loop. (case 3)
            #
            #     During this step, we may detect obsolescence cycles: a node
            #     with unknown successors sets but already in the call stack.
            #     In such a situation, we arbitrarily set the successors sets
            #     of the node to nothing (node pruned) to break the cycle.
            #
            #     If no break was encountered we proceed to phase 2.
            #
            # Phase 2 computes the successors sets of CURRENT (case 4); see
            # details in phase 2 itself.
            #
            # Note the two levels of iteration in each phase.
            # - The first one handles obsolescence markers using CURRENT as
            #   precursor (successors markers of CURRENT).
            #
            #   Having multiple entries here means divergence.
            #
            # - The second one handles successors defined in each marker.
            #
            #   Having none means a pruned node, multiple successors mean a
            #   split, and a single successor is a standard replacement.
            #
            for mark in sorted(succmarkers[current]):
                for suc in mark[1]:
                    if suc not in cache:
                        if suc in stackedset:
                            # cycle breaking
                            cache[suc] = []
                        else:
                            # case (3) If we have not computed the successors
                            # sets of one of those successors we add it to the
                            # `toproceed` stack and stop all work for this
                            # iteration.
                            toproceed.append(suc)
                            stackedset.add(suc)
                            break
                else:
                    continue
                break
            else:
                # case (4): we know all successors sets of all direct
                # successors
                #
                # The successors set contributed by each marker depends on the
                # successors sets of all its "successors" nodes.
                #
                # Each different marker is a divergence in the obsolescence
                # history. It contributes successors sets distinct from other
                # markers.
                #
                # Within a marker, a successor may have divergent successors
                # sets. In such a case, the marker will contribute multiple
                # divergent successors sets. If multiple successors have
                # divergent successors sets, a Cartesian product is used.
                #
                # At the end we post-process successors sets to remove
                # duplicated entries and successors sets that are strict
                # subsets of another one.
                succssets = []
                for mark in sorted(succmarkers[current]):
                    # successors sets contributed by this marker
                    markss = [[]]
                    for suc in mark[1]:
                        # cartesian product with previous successors
                        productresult = []
                        for prefix in markss:
                            for suffix in cache[suc]:
                                newss = list(prefix)
                                for part in suffix:
                                    # do not duplicate entries in a successors
                                    # set; first entry wins.
                                    if part not in newss:
                                        newss.append(part)
                                productresult.append(newss)
                        markss = productresult
                    succssets.extend(markss)
                # remove duplicates and subsets
                seen = []
                final = []
                candidate = sorted(((set(s), s) for s in succssets if s),
                                   key=lambda x: len(x[1]), reverse=True)
                for setversion, listversion in candidate:
                    for seenset in seen:
                        if setversion.issubset(seenset):
                            break
                    else:
                        final.append(listversion)
                        seen.append(setversion)
                final.reverse()  # put small successors sets first
                cache[current] = final
    return cache[initialnode]

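# A minimal, self-contained sketch of the phase-2 combination step above,
# using short strings as nodes instead of binary changeset ids. The marker
# layout ((precursor, (successors, ...))) mirrors how the loop above reads
# `mark[1]`; the `_demoproduct` name and the sample data are illustrative
# assumptions, not part of this module.
def _demoproduct(markers, cache):
    # markers: successors markers of one node; cache: successors sets of the
    # direct successors, as computed by previous iterations of the while loop.
    succssets = []
    for mark in sorted(markers):
        markss = [[]]  # successors sets contributed by this marker
        for suc in mark[1]:
            productresult = []
            for prefix in markss:
                for suffix in cache[suc]:
                    newss = list(prefix)
                    for part in suffix:
                        if part not in newss:  # first entry wins
                            newss.append(part)
                    productresult.append(newss)
            markss = productresult
        succssets.extend(markss)
    return succssets

# 'A' was split into 'B' and 'C' by one marker; a divergent second marker
# rewrote 'A' into 'D'. 'B', 'C' and 'D' are not obsolete, so each is its own
# successors set: the result is one successors set per marker.
assert _demoproduct([('A', ('B', 'C')), ('A', ('D',))],
                    {'B': [('B',)], 'C': [('C',)], 'D': [('D',)]}) \
    == [['B', 'C'], ['D']]
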
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}
def cachefor(name):
    """Decorator to register a function as computing the cache for a set"""
    def decorator(func):
        assert name not in cachefuncs
        cachefuncs[name] = func
        return func
    return decorator

def getrevs(repo, name):
    """Return the set of revisions that belong to the <name> set

    Such access may compute the set and cache it for future use"""
    repo = repo.unfiltered()
    if not repo.obsstore:
        return frozenset()
    if name not in repo.obsstore.caches:
        repo.obsstore.caches[name] = cachefuncs[name](repo)
    return repo.obsstore.caches[name]

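# A self-contained sketch of the decorator-registry pattern that `cachefor`
# and `getrevs` implement above. The `demo*` names and the 'evens' set name
# are illustrative assumptions, not part of this module: functions register
# themselves under a set name, and lookups compute the set lazily, then cache
# it.
demofuncs = {}
def demofor(name):
    def decorator(func):
        assert name not in demofuncs
        demofuncs[name] = func
        return func
    return decorator

@demofor('evens')
def _computeevens(data):
    return frozenset(n for n in data if n % 2 == 0)

democaches = {}
def demogetrevs(data, name):
    # compute lazily on first access, then reuse, mirroring getrevs()
    if name not in democaches:
        democaches[name] = demofuncs[name](data)
    return democaches[name]

assert demogetrevs(range(5), 'evens') == frozenset([0, 2, 4])
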
# To keep it simple we need to invalidate the obsolescence caches when:
#
# - a new changeset is added
# - a public phase is changed
# - obsolescence markers are added
# - strip is used on a repo
def clearobscaches(repo):
    """Remove all obsolescence-related caches from a repo

    This removes all caches in the obsstore if the obsstore already exists on
    the repo.

    (We could be smarter here given the exact event that triggered the cache
    clearing)"""
    # only clear the caches if there is obsstore data in this repo
    if 'obsstore' in repo._filecache:
        repo.obsstore.caches.clear()

@cachefor('obsolete')
def _computeobsoleteset(repo):
    """the set of obsolete revisions"""
    obs = set()
    getrev = repo.changelog.nodemap.get
    getphase = repo._phasecache.phase
    for n in repo.obsstore.successors:
        rev = getrev(n)
        if rev is not None and getphase(repo, rev):
            # any non-public phase (phase > 0) means the revision is mutable
            obs.add(rev)
    return obs

@cachefor('unstable')
def _computeunstableset(repo):
    """the set of non obsolete revisions with obsolete parents"""
    revs = [(ctx.rev(), ctx) for ctx in
            repo.set('(not public()) and (not obsolete())')]
    revs.sort(key=lambda x: x[0])
    unstable = set()
    for rev, ctx in revs:
        # A rev is unstable if one of its parents is obsolete or unstable
        # this works since we traverse following growing rev order
        if any((x.obsolete() or (x.rev() in unstable))
               for x in ctx.parents()):
            unstable.add(rev)
    return unstable

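# A self-contained sketch of the propagation trick used above: visiting
# revisions in growing (topological) order lets instability flow from parent
# to child in a single pass. Revisions are plain ints and the `demo*` dicts
# are hand-built, purely for illustration.
demoparents = {0: [], 1: [0], 2: [1], 3: [2]}
demoobsolete = {0: False, 1: True, 2: False, 3: False}
demounstable = set()
for demorev in sorted(demoparents):
    if demoobsolete[demorev]:
        continue  # the real code pre-filters obsolete revs with a revset
    if any(demoobsolete[p] or p in demounstable for p in demoparents[demorev]):
        demounstable.add(demorev)
assert demounstable == set([2, 3])
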
@cachefor('suspended')
def _computesuspendedset(repo):
    """the set of obsolete changesets with non obsolete descendants"""
    suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
    return set(r for r in getrevs(repo, 'obsolete') if r in suspended)

@cachefor('extinct')
def _computeextinctset(repo):
    """the set of obsolete changesets without non obsolete descendants"""
    return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')


@cachefor('bumped')
def _computebumpedset(repo):
    """the set of revs trying to obsolete public revisions"""
    bumped = set()
    # utility functions (avoid attribute lookups in the loop)
    phase = repo._phasecache.phase  # would be faster to grab the full list
    public = phases.public
    cl = repo.changelog
    torev = cl.nodemap.get
    for ctx in repo.set('(not public()) and (not obsolete())'):
        rev = ctx.rev()
        # We only evaluate mutable, non-obsolete revisions
        node = ctx.node()
        # (future) A cache of precursors may be worth it if split is very
        # common
        for pnode in allprecursors(repo.obsstore, [node],
                                   ignoreflags=bumpedfix):
            prev = torev(pnode)  # unfiltered! but so is phasecache
            if (prev is not None) and (phase(repo, prev) <= public):
                # we have a public precursor
                bumped.add(rev)
                break  # Next draft!
    return bumped

@cachefor('divergent')
def _computedivergentset(repo):
    """the set of revs that compete to be the final successors of some
    revision.
    """
    divergent = set()
    obsstore = repo.obsstore
    newermap = {}
    for ctx in repo.set('(not public()) - obsolete()'):
        mark = obsstore.precursors.get(ctx.node(), ())
        toprocess = set(mark)
        seen = set()
        while toprocess:
            prec = toprocess.pop()[0]
            if prec in seen:
                continue  # emergency cycle hanging prevention
            seen.add(prec)
            if prec not in newermap:
                successorssets(repo, prec, newermap)
            newer = [n for n in newermap[prec] if n]
            if len(newer) > 1:
                divergent.add(ctx.rev())
                break
            toprocess.update(obsstore.precursors.get(prec, ()))
    return divergent


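# A hand-rolled sketch of the divergence test above: a changeset is divergent
# when some precursor has more than one non-empty successors set competing to
# replace it. `demonewermap` is filled by hand here; the real code fills
# `newermap` via successorssets().
demonewermap = {'X': [('A',), ('B',)]}
demonewer = [n for n in demonewermap['X'] if n]
assert len(demonewer) > 1  # 'A' and 'B' compete to replace 'X': divergence
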
def createmarkers(repo, relations, flag=0, date=None, metadata=None):
    """Add obsolete markers between changesets in a repo

    <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
    tuples. `old` and `news` are changectx objects. metadata is an optional
    dictionary containing metadata for this marker only. It is merged with
    the global metadata specified through the `metadata` argument of this
    function.

    Trying to obsolete a public changeset will raise an exception.

    Current user and date are used except if specified otherwise in the
    metadata attribute.

    This function operates within a transaction of its own, but does
    not take any lock on the repo.
    """
    # prepare metadata
    if metadata is None:
        metadata = {}
    if 'user' not in metadata:
        metadata['user'] = repo.ui.username()
    tr = repo.transaction('add-obsolescence-marker')
    try:
        for rel in relations:
            prec = rel[0]
            sucs = rel[1]
            localmetadata = metadata.copy()
            if 2 < len(rel):
                localmetadata.update(rel[2])

            if not prec.mutable():
                raise error.Abort("cannot obsolete public changeset: %s"
                                  % prec,
                                  hint='see "hg help phases" for details')
            nprec = prec.node()
            nsucs = tuple(s.node() for s in sucs)
            npare = None
            if not nsucs:
                npare = tuple(p.node() for p in prec.parents())
            if nprec in nsucs:
                raise error.Abort("changeset %s cannot obsolete itself"
                                  % prec)
            repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
                                 date=date, metadata=localmetadata)
            repo.filteredrevcache.clear()
        tr.close()
    finally:
        tr.release()

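# How a caller might drive createmarkers(), assuming `repo` is an open
# localrepository and `old`/`new` are changectx objects -- an illustrative
# sketch of the relations format, not runnable on its own:
#
#     createmarkers(repo, [(old, (new,))])                   # replacement
#     createmarkers(repo, [(old, ())])                       # prune
#     createmarkers(repo, [(old, (new,), {'note': 'why'})])  # extra metadata
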
def isenabled(repo, option):
    """Returns True if the given repository has the given obsolete option
    enabled.
    """
    result = set(repo.ui.configlist('experimental', 'evolution'))
    if 'all' in result:
        return True

    # For migration purposes, temporarily return true if the config hasn't
    # been set but _enabled is true.
    if len(result) == 0 and _enabled:
        return True

    # createmarkers must be enabled if other options are enabled
    if ((allowunstableopt in result or exchangeopt in result) and
            createmarkersopt not in result):
        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
                            "if other obsolete options are enabled"))

    return option in result
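
# An example hgrc enabling only marker creation; the option names are assumed
# to match the module-level constants referenced above (createmarkersopt =
# 'createmarkers', allowunstableopt = 'allowunstable', exchangeopt =
# 'exchange'):
#
#     [experimental]
#     evolution = createmarkers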
@@ -1,221 +1,220 @@
#require test-repo

  $ cd "$TESTDIR"/..

  $ hg files 'set:(**.py)' | xargs python contrib/check-py3-compat.py
  contrib/casesmash.py not using absolute_import
  contrib/check-code.py not using absolute_import
  contrib/check-code.py requires print_function
  contrib/check-config.py not using absolute_import
  contrib/check-config.py requires print_function
  contrib/debugcmdserver.py not using absolute_import
  contrib/debugcmdserver.py requires print_function
  contrib/debugshell.py not using absolute_import
  contrib/fixpax.py not using absolute_import
  contrib/fixpax.py requires print_function
  contrib/hgclient.py not using absolute_import
  contrib/hgclient.py requires print_function
  contrib/hgfixes/fix_bytes.py not using absolute_import
  contrib/hgfixes/fix_bytesmod.py not using absolute_import
  contrib/hgfixes/fix_leftover_imports.py not using absolute_import
  contrib/import-checker.py not using absolute_import
  contrib/import-checker.py requires print_function
  contrib/memory.py not using absolute_import
  contrib/perf.py not using absolute_import
  contrib/python-hook-examples.py not using absolute_import
  contrib/revsetbenchmarks.py not using absolute_import
  contrib/revsetbenchmarks.py requires print_function
  contrib/showstack.py not using absolute_import
  contrib/synthrepo.py not using absolute_import
  contrib/win32/hgwebdir_wsgi.py not using absolute_import
  doc/check-seclevel.py not using absolute_import
  doc/gendoc.py not using absolute_import
  doc/hgmanpage.py not using absolute_import
  hgext/__init__.py not using absolute_import
  hgext/acl.py not using absolute_import
  hgext/blackbox.py not using absolute_import
  hgext/bugzilla.py not using absolute_import
  hgext/censor.py not using absolute_import
  hgext/children.py not using absolute_import
  hgext/churn.py not using absolute_import
  hgext/clonebundles.py not using absolute_import
  hgext/color.py not using absolute_import
  hgext/convert/__init__.py not using absolute_import
  hgext/convert/bzr.py not using absolute_import
  hgext/convert/common.py not using absolute_import
  hgext/convert/convcmd.py not using absolute_import
  hgext/convert/cvs.py not using absolute_import
  hgext/convert/cvsps.py not using absolute_import
  hgext/convert/darcs.py not using absolute_import
  hgext/convert/filemap.py not using absolute_import
  hgext/convert/git.py not using absolute_import
  hgext/convert/gnuarch.py not using absolute_import
  hgext/convert/hg.py not using absolute_import
  hgext/convert/monotone.py not using absolute_import
  hgext/convert/p4.py not using absolute_import
  hgext/convert/subversion.py not using absolute_import
  hgext/convert/transport.py not using absolute_import
  hgext/eol.py not using absolute_import
  hgext/extdiff.py not using absolute_import
  hgext/factotum.py not using absolute_import
  hgext/fetch.py not using absolute_import
  hgext/gpg.py not using absolute_import
  hgext/graphlog.py not using absolute_import
  hgext/hgcia.py not using absolute_import
  hgext/hgk.py not using absolute_import
  hgext/highlight/__init__.py not using absolute_import
  hgext/highlight/highlight.py not using absolute_import
  hgext/histedit.py not using absolute_import
  hgext/keyword.py not using absolute_import
  hgext/largefiles/__init__.py not using absolute_import
  hgext/largefiles/basestore.py not using absolute_import
  hgext/largefiles/lfcommands.py not using absolute_import
  hgext/largefiles/lfutil.py not using absolute_import
  hgext/largefiles/localstore.py not using absolute_import
  hgext/largefiles/overrides.py not using absolute_import
  hgext/largefiles/proto.py not using absolute_import
  hgext/largefiles/remotestore.py not using absolute_import
  hgext/largefiles/reposetup.py not using absolute_import
  hgext/largefiles/uisetup.py not using absolute_import
  hgext/largefiles/wirestore.py not using absolute_import
  hgext/mq.py not using absolute_import
  hgext/notify.py not using absolute_import
  hgext/pager.py not using absolute_import
  hgext/patchbomb.py not using absolute_import
  hgext/purge.py not using absolute_import
  hgext/rebase.py not using absolute_import
  hgext/record.py not using absolute_import
  hgext/relink.py not using absolute_import
  hgext/schemes.py not using absolute_import
  hgext/share.py not using absolute_import
  hgext/shelve.py not using absolute_import
  hgext/strip.py not using absolute_import
  hgext/transplant.py not using absolute_import
  hgext/win32mbcs.py not using absolute_import
  hgext/win32text.py not using absolute_import
  hgext/zeroconf/Zeroconf.py not using absolute_import
  hgext/zeroconf/Zeroconf.py requires print_function
  hgext/zeroconf/__init__.py not using absolute_import
  i18n/check-translation.py not using absolute_import
  i18n/polib.py not using absolute_import
  mercurial/byterange.py not using absolute_import
  mercurial/cmdutil.py not using absolute_import
  mercurial/commands.py not using absolute_import
  mercurial/commandserver.py not using absolute_import
  mercurial/context.py not using absolute_import
  mercurial/destutil.py not using absolute_import
  mercurial/dirstate.py not using absolute_import
  mercurial/dispatch.py requires print_function
  mercurial/encoding.py not using absolute_import
  mercurial/exchange.py not using absolute_import
  mercurial/help.py not using absolute_import
  mercurial/httpclient/__init__.py not using absolute_import
  mercurial/httpclient/_readers.py not using absolute_import
  mercurial/httpclient/socketutil.py not using absolute_import
  mercurial/httpconnection.py not using absolute_import
  mercurial/keepalive.py not using absolute_import
  mercurial/keepalive.py requires print_function
  mercurial/localrepo.py not using absolute_import
  mercurial/lsprof.py requires print_function
  mercurial/lsprofcalltree.py not using absolute_import
  mercurial/lsprofcalltree.py requires print_function
  mercurial/mail.py requires print_function
  mercurial/manifest.py not using absolute_import
  mercurial/mdiff.py not using absolute_import
-  mercurial/obsolete.py not using absolute_import
  mercurial/patch.py not using absolute_import
  mercurial/pure/base85.py not using absolute_import
  mercurial/pure/bdiff.py not using absolute_import
  mercurial/pure/diffhelpers.py not using absolute_import
  mercurial/pure/mpatch.py not using absolute_import
  mercurial/pure/osutil.py not using absolute_import
  mercurial/pure/parsers.py not using absolute_import
  mercurial/pvec.py not using absolute_import
  mercurial/py3kcompat.py not using absolute_import
  mercurial/revlog.py not using absolute_import
  mercurial/scmposix.py not using absolute_import
  mercurial/scmutil.py not using absolute_import
  mercurial/scmwindows.py not using absolute_import
  mercurial/similar.py not using absolute_import
  mercurial/store.py not using absolute_import
  mercurial/util.py not using absolute_import
  mercurial/windows.py not using absolute_import
  setup.py not using absolute_import
  tests/filterpyflakes.py requires print_function
  tests/generate-working-copy-states.py requires print_function
  tests/get-with-headers.py requires print_function
  tests/heredoctest.py requires print_function
  tests/hypothesishelpers.py not using absolute_import
  tests/hypothesishelpers.py requires print_function
  tests/killdaemons.py not using absolute_import
  tests/md5sum.py not using absolute_import
  tests/mockblackbox.py not using absolute_import
  tests/printenv.py not using absolute_import
  tests/readlink.py not using absolute_import
  tests/readlink.py requires print_function
  tests/revlog-formatv0.py not using absolute_import
  tests/run-tests.py not using absolute_import
  tests/seq.py not using absolute_import
  tests/seq.py requires print_function
  tests/silenttestrunner.py not using absolute_import
  tests/silenttestrunner.py requires print_function
  tests/sitecustomize.py not using absolute_import
  tests/svn-safe-append.py not using absolute_import
  tests/svnxml.py not using absolute_import
  tests/test-ancestor.py requires print_function
  tests/test-atomictempfile.py not using absolute_import
  tests/test-batching.py not using absolute_import
  tests/test-batching.py requires print_function
  tests/test-bdiff.py not using absolute_import
  tests/test-bdiff.py requires print_function
  tests/test-context.py not using absolute_import
  tests/test-context.py requires print_function
  tests/test-demandimport.py not using absolute_import
  tests/test-demandimport.py requires print_function
  tests/test-dispatch.py not using absolute_import
  tests/test-dispatch.py requires print_function
  tests/test-doctest.py not using absolute_import
  tests/test-duplicateoptions.py not using absolute_import
  tests/test-duplicateoptions.py requires print_function
  tests/test-filecache.py not using absolute_import
  tests/test-filecache.py requires print_function
  tests/test-filelog.py not using absolute_import
  tests/test-filelog.py requires print_function
  tests/test-hg-parseurl.py not using absolute_import
  tests/test-hg-parseurl.py requires print_function
  tests/test-hgweb-auth.py not using absolute_import
  tests/test-hgweb-auth.py requires print_function
  tests/test-hgwebdir-paths.py not using absolute_import
  tests/test-hybridencode.py not using absolute_import
  tests/test-hybridencode.py requires print_function
  tests/test-lrucachedict.py not using absolute_import
  tests/test-lrucachedict.py requires print_function
  tests/test-manifest.py not using absolute_import
  tests/test-minirst.py not using absolute_import
  tests/test-minirst.py requires print_function
  tests/test-parseindex2.py not using absolute_import
  tests/test-parseindex2.py requires print_function
  tests/test-pathencode.py not using absolute_import
  tests/test-pathencode.py requires print_function
  tests/test-propertycache.py not using absolute_import
  tests/test-propertycache.py requires print_function
  tests/test-revlog-ancestry.py not using absolute_import
  tests/test-revlog-ancestry.py requires print_function
  tests/test-run-tests.py not using absolute_import
  tests/test-simplemerge.py not using absolute_import
  tests/test-status-inprocess.py not using absolute_import
  tests/test-status-inprocess.py requires print_function
  tests/test-symlink-os-yes-fs-no.py not using absolute_import
  tests/test-trusted.py not using absolute_import
  tests/test-trusted.py requires print_function
  tests/test-ui-color.py not using absolute_import
  tests/test-ui-color.py requires print_function
  tests/test-ui-config.py not using absolute_import
  tests/test-ui-config.py requires print_function
  tests/test-ui-verbosity.py not using absolute_import
  tests/test-ui-verbosity.py requires print_function
  tests/test-url.py not using absolute_import
  tests/test-url.py requires print_function
  tests/test-walkrepo.py requires print_function
  tests/test-wireproto.py requires print_function
  tests/tinyproxy.py requires print_function