# NOTE(review): this file was captured from a Mercurial web diff view of
# changeset r50504:78ba4187 (branch: default, author: marmoute):
#   "delta-find: add debug information about reuse of cached data"
# The blocks below reconstruct the underlying source of
# mercurial/revlogutils/deltas.py from that rendering.
1 # revlogdeltas.py - Logic around delta computation for revlog
1 # revlogdeltas.py - Logic around delta computation for revlog
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2018 Octobus <contact@octobus.net>
4 # Copyright 2018 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """Helper class to compute deltas stored inside revlogs"""
8 """Helper class to compute deltas stored inside revlogs"""
9
9
10
10
11 import collections
11 import collections
12 import struct
12 import struct
13
13
14 # import stuff from node for others to import from revlog
14 # import stuff from node for others to import from revlog
15 from ..node import nullrev
15 from ..node import nullrev
16 from ..i18n import _
16 from ..i18n import _
17 from ..pycompat import getattr
17 from ..pycompat import getattr
18
18
19 from .constants import (
19 from .constants import (
20 COMP_MODE_DEFAULT,
20 COMP_MODE_DEFAULT,
21 COMP_MODE_INLINE,
21 COMP_MODE_INLINE,
22 COMP_MODE_PLAIN,
22 COMP_MODE_PLAIN,
23 KIND_CHANGELOG,
23 KIND_CHANGELOG,
24 KIND_FILELOG,
24 KIND_FILELOG,
25 KIND_MANIFESTLOG,
25 KIND_MANIFESTLOG,
26 REVIDX_ISCENSORED,
26 REVIDX_ISCENSORED,
27 REVIDX_RAWTEXT_CHANGING_FLAGS,
27 REVIDX_RAWTEXT_CHANGING_FLAGS,
28 )
28 )
29
29
30 from ..thirdparty import attr
30 from ..thirdparty import attr
31
31
32 from .. import (
32 from .. import (
33 error,
33 error,
34 mdiff,
34 mdiff,
35 util,
35 util,
36 )
36 )
37
37
38 from . import flagutil
38 from . import flagutil
39
39
40 # maximum <delta-chain-data>/<revision-text-length> ratio
40 # maximum <delta-chain-data>/<revision-text-length> ratio
41 LIMIT_DELTA2TEXT = 2
41 LIMIT_DELTA2TEXT = 2
42
42
43
43
44 class _testrevlog:
44 class _testrevlog:
45 """minimalist fake revlog to use in doctests"""
45 """minimalist fake revlog to use in doctests"""
46
46
47 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
47 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
48 """data is an list of revision payload boundaries"""
48 """data is an list of revision payload boundaries"""
49 self._data = data
49 self._data = data
50 self._srdensitythreshold = density
50 self._srdensitythreshold = density
51 self._srmingapsize = mingap
51 self._srmingapsize = mingap
52 self._snapshot = set(snapshot)
52 self._snapshot = set(snapshot)
53 self.index = None
53 self.index = None
54
54
55 def start(self, rev):
55 def start(self, rev):
56 if rev == nullrev:
56 if rev == nullrev:
57 return 0
57 return 0
58 if rev == 0:
58 if rev == 0:
59 return 0
59 return 0
60 return self._data[rev - 1]
60 return self._data[rev - 1]
61
61
62 def end(self, rev):
62 def end(self, rev):
63 if rev == nullrev:
63 if rev == nullrev:
64 return 0
64 return 0
65 return self._data[rev]
65 return self._data[rev]
66
66
67 def length(self, rev):
67 def length(self, rev):
68 return self.end(rev) - self.start(rev)
68 return self.end(rev) - self.start(rev)
69
69
70 def __len__(self):
70 def __len__(self):
71 return len(self._data)
71 return len(self._data)
72
72
73 def issnapshot(self, rev):
73 def issnapshot(self, rev):
74 if rev == nullrev:
74 if rev == nullrev:
75 return True
75 return True
76 return rev in self._snapshot
76 return rev in self._snapshot
77
77
78
78
def slicechunk(revlog, revs, targetsize=None):
    """slice revs to reduce the amount of unrelated data to be read from disk.

    ``revs`` is sliced into groups that should be read in one time.
    Assume that revs are sorted.

    The initial chunk is sliced until the overall density (payload/chunks-span
    ratio) is above `revlog._srdensitythreshold`. No gap smaller than
    `revlog._srmingapsize` is skipped.

    If `targetsize` is set, no chunk larger than `targetsize` will be yield.
    For consistency with other slicing choice, this limit won't go lower than
    `revlog._srmingapsize`.

    If individual revisions chunk are larger than this limit, they will still
    be raised individually.

    >>> data = [
    ...     5,  #00 (5)
    ...     10, #01 (5)
    ...     12, #02 (2)
    ...     12, #03 (empty)
    ...     27, #04 (15)
    ...     31, #05 (4)
    ...     31, #06 (empty)
    ...     42, #07 (11)
    ...     47, #08 (5)
    ...     47, #09 (empty)
    ...     48, #10 (1)
    ...     51, #11 (3)
    ...     74, #12 (23)
    ...     85, #13 (11)
    ...     86, #14 (1)
    ...     91, #15 (5)
    ... ]
    >>> revlog = _testrevlog(data, snapshot=range(16))

    >>> list(slicechunk(revlog, list(range(16))))
    [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]]
    >>> list(slicechunk(revlog, [0, 15]))
    [[0], [15]]
    >>> list(slicechunk(revlog, [0, 11, 15]))
    [[0], [11], [15]]
    >>> list(slicechunk(revlog, [0, 11, 13, 15]))
    [[0], [11, 13, 15]]
    >>> list(slicechunk(revlog, [1, 2, 3, 5, 8, 10, 11, 14]))
    [[1, 2], [5, 8, 10, 11], [14]]

    Slicing with a maximum chunk size
    >>> list(slicechunk(revlog, [0, 11, 13, 15], targetsize=15))
    [[0], [11], [13], [15]]
    >>> list(slicechunk(revlog, [0, 11, 13, 15], targetsize=20))
    [[0], [11], [13, 15]]

    Slicing involving nullrev
    >>> list(slicechunk(revlog, [-1, 0, 11, 13, 15], targetsize=20))
    [[-1, 0], [11], [13, 15]]
    >>> list(slicechunk(revlog, [-1, 13, 15], targetsize=5))
    [[-1], [13], [15]]
    """
    if targetsize is not None:
        targetsize = max(targetsize, revlog._srmingapsize)
    # targetsize should not be specified when evaluating delta candidates:
    # * targetsize is used to ensure we stay within specification when reading,
    densityslicing = getattr(revlog.index, 'slicechunktodensity', None)
    if densityslicing is None:
        # no native (C) implementation available; fall back to pure Python
        def densityslicing(revs, density, mingap):
            return _slicechunktodensity(revlog, revs, density, mingap)

    density_chunks = densityslicing(
        revs, revlog._srdensitythreshold, revlog._srmingapsize
    )
    for chunk in density_chunks:
        # second pass: enforce the maximum read size on each dense chunk
        yield from _slicechunktosize(revlog, chunk, targetsize)
151
151
152
152
def _slicechunktosize(revlog, revs, targetsize=None):
    """slice revs to match the target size

    This is intended to be used on chunk that density slicing selected by that
    are still too large compared to the read garantee of revlog. This might
    happens when "minimal gap size" interrupted the slicing or when chain are
    built in a way that create large blocks next to each other.

    >>> data = [
    ...     3,  #0 (3)
    ...     5,  #1 (2)
    ...     6,  #2 (1)
    ...     8,  #3 (2)
    ...     8,  #4 (empty)
    ...     11, #5 (3)
    ...     12, #6 (1)
    ...     13, #7 (1)
    ...     14, #8 (1)
    ... ]

    == All snapshots cases ==
    >>> revlog = _testrevlog(data, snapshot=range(9))

    Cases where chunk is already small enough
    >>> list(_slicechunktosize(revlog, [0], 3))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], 3))
    [[6, 7]]
    >>> list(_slicechunktosize(revlog, [0], None))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], None))
    [[6, 7]]

    cases where we need actual slicing
    >>> list(_slicechunktosize(revlog, [0, 1], 3))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 3))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [1, 2, 3], 3))
    [[1, 2], [3]]
    >>> list(_slicechunktosize(revlog, [3, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [5, 6, 7, 8], 3))
    [[5], [6, 7, 8]]
    >>> list(_slicechunktosize(revlog, [0, 1, 2, 3, 4, 5, 6, 7, 8], 3))
    [[0], [1, 2], [3], [5], [6, 7, 8]]

    Case with too large individual chunk (must return valid chunk)
    >>> list(_slicechunktosize(revlog, [0, 1], 2))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 1))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 2))
    [[3], [5]]

    == No Snapshot cases ==
    >>> revlog = _testrevlog(data)

    Cases where chunk is already small enough
    >>> list(_slicechunktosize(revlog, [0], 3))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], 3))
    [[6, 7]]
    >>> list(_slicechunktosize(revlog, [0], None))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], None))
    [[6, 7]]

    cases where we need actual slicing
    >>> list(_slicechunktosize(revlog, [0, 1], 3))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 3))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [1, 2, 3], 3))
    [[1], [2, 3]]
    >>> list(_slicechunktosize(revlog, [3, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 3))
    [[3], [4, 5]]
    >>> list(_slicechunktosize(revlog, [5, 6, 7, 8], 3))
    [[5], [6, 7, 8]]
    >>> list(_slicechunktosize(revlog, [0, 1, 2, 3, 4, 5, 6, 7, 8], 3))
    [[0], [1, 2], [3], [5], [6, 7, 8]]

    Case with too large individual chunk (must return valid chunk)
    >>> list(_slicechunktosize(revlog, [0, 1], 2))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 1))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 2))
    [[3], [5]]

    == mixed case ==
    >>> revlog = _testrevlog(data, snapshot=[0, 1, 2])
    >>> list(_slicechunktosize(revlog, list(range(9)), 5))
    [[0, 1], [2], [3, 4, 5], [6, 7, 8]]
    """
    assert targetsize is None or 0 <= targetsize
    startdata = revlog.start(revs[0])
    enddata = revlog.end(revs[-1])
    fullspan = enddata - startdata
    # fast path: the whole chunk already fits within the target size
    if targetsize is None or fullspan <= targetsize:
        yield revs
        return

    startrevidx = 0
    endrevidx = 1
    iterrevs = enumerate(revs)
    next(iterrevs)  # skip first rev.
    # first step: get snapshots out of the way
    for idx, r in iterrevs:
        span = revlog.end(r) - startdata
        snapshot = revlog.issnapshot(r)
        if span <= targetsize and snapshot:
            # still within budget; extend the current chunk
            endrevidx = idx + 1
        else:
            # flush the accumulated chunk and start a new one at `r`
            chunk = _trimchunk(revlog, revs, startrevidx, endrevidx)
            if chunk:
                yield chunk
            startrevidx = idx
            startdata = revlog.start(r)
            endrevidx = idx + 1
        if not snapshot:
            # non-snapshot reached: the remainder is handled below
            break

    # for the others, we use binary slicing to quickly converge toward valid
    # chunks (otherwise, we might end up looking for start/end of many
    # revisions). This logic is not looking for the perfect slicing point, it
    # focuses on quickly converging toward valid chunks.
    nbitem = len(revs)
    while (enddata - startdata) > targetsize:
        endrevidx = nbitem
        if nbitem - startrevidx <= 1:
            break  # protect against individual chunk larger than limit
        localenddata = revlog.end(revs[endrevidx - 1])
        span = localenddata - startdata
        # halve the candidate range until it fits (or cannot shrink further)
        while span > targetsize:
            if endrevidx - startrevidx <= 1:
                break  # protect against individual chunk larger than limit
            endrevidx -= (endrevidx - startrevidx) // 2
            localenddata = revlog.end(revs[endrevidx - 1])
            span = localenddata - startdata
        chunk = _trimchunk(revlog, revs, startrevidx, endrevidx)
        if chunk:
            yield chunk
        startrevidx = endrevidx
        startdata = revlog.start(revs[startrevidx])

    # emit whatever is left past the last slicing point
    chunk = _trimchunk(revlog, revs, startrevidx)
    if chunk:
        yield chunk
306
306
307
307
def _slicechunktodensity(revlog, revs, targetdensity=0.5, mingapsize=0):
    """slice revs to reduce the amount of unrelated data to be read from disk.

    ``revs`` is sliced into groups that should be read in one time.
    Assume that revs are sorted.

    The initial chunk is sliced until the overall density (payload/chunks-span
    ratio) is above `targetdensity`. No gap smaller than `mingapsize` is
    skipped.

    >>> revlog = _testrevlog([
    ...     5,  #00 (5)
    ...     10, #01 (5)
    ...     12, #02 (2)
    ...     12, #03 (empty)
    ...     27, #04 (15)
    ...     31, #05 (4)
    ...     31, #06 (empty)
    ...     42, #07 (11)
    ...     47, #08 (5)
    ...     47, #09 (empty)
    ...     48, #10 (1)
    ...     51, #11 (3)
    ...     74, #12 (23)
    ...     85, #13 (11)
    ...     86, #14 (1)
    ...     91, #15 (5)
    ... ])

    >>> list(_slicechunktodensity(revlog, list(range(16))))
    [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]]
    >>> list(_slicechunktodensity(revlog, [0, 15]))
    [[0], [15]]
    >>> list(_slicechunktodensity(revlog, [0, 11, 15]))
    [[0], [11], [15]]
    >>> list(_slicechunktodensity(revlog, [0, 11, 13, 15]))
    [[0], [11, 13, 15]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14]))
    [[1, 2], [5, 8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           mingapsize=20))
    [[1, 2, 3, 5, 8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           targetdensity=0.95))
    [[1, 2], [5], [8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           targetdensity=0.95, mingapsize=12))
    [[1, 2], [5, 8, 10, 11], [14]]
    """
    # bind methods locally: these are called once per revision below
    start = revlog.start
    length = revlog.length

    # a single revision is always dense enough
    if len(revs) <= 1:
        yield revs
        return

    deltachainspan = segmentspan(revlog, revs)

    if deltachainspan < mingapsize:
        yield revs
        return

    readdata = deltachainspan
    chainpayload = sum(length(r) for r in revs)

    if deltachainspan:
        density = chainpayload / float(deltachainspan)
    else:
        density = 1.0

    if density >= targetdensity:
        yield revs
        return

    # Gather the gaps between consecutive non-empty revisions
    gaps = []
    prevend = None
    for i, rev in enumerate(revs):
        revstart = start(rev)
        revlen = length(rev)

        # Skip empty revisions to form larger holes
        if revlen == 0:
            continue

        if prevend is not None:
            gapsize = revstart - prevend
            # only consider holes that are large enough
            if gapsize > mingapsize:
                gaps.append((gapsize, i))

        prevend = revstart + revlen
    # sort the gaps so popping from the end yields them largest first
    gaps.sort()

    # Collect the indices of the largest holes until the density is acceptable
    selected = []
    while gaps and density < targetdensity:
        gapsize, gapidx = gaps.pop()

        selected.append(gapidx)

        # skipping this gap means that much less data is actually read
        readdata -= gapsize
        if readdata > 0:
            density = chainpayload / float(readdata)
        else:
            density = 1.0
    selected.sort()

    # Cut the revs at collected indices
    previdx = 0
    for idx in selected:

        chunk = _trimchunk(revlog, revs, previdx, idx)
        if chunk:
            yield chunk

        previdx = idx

    chunk = _trimchunk(revlog, revs, previdx)
    if chunk:
        yield chunk
432
432
433
433
434 def _trimchunk(revlog, revs, startidx, endidx=None):
434 def _trimchunk(revlog, revs, startidx, endidx=None):
435 """returns revs[startidx:endidx] without empty trailing revs
435 """returns revs[startidx:endidx] without empty trailing revs
436
436
437 Doctest Setup
437 Doctest Setup
438 >>> revlog = _testrevlog([
438 >>> revlog = _testrevlog([
439 ... 5, #0
439 ... 5, #0
440 ... 10, #1
440 ... 10, #1
441 ... 12, #2
441 ... 12, #2
442 ... 12, #3 (empty)
442 ... 12, #3 (empty)
443 ... 17, #4
443 ... 17, #4
444 ... 21, #5
444 ... 21, #5
445 ... 21, #6 (empty)
445 ... 21, #6 (empty)
446 ... ])
446 ... ])
447
447
448 Contiguous cases:
448 Contiguous cases:
449 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0)
449 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0)
450 [0, 1, 2, 3, 4, 5]
450 [0, 1, 2, 3, 4, 5]
451 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 5)
451 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 5)
452 [0, 1, 2, 3, 4]
452 [0, 1, 2, 3, 4]
453 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 4)
453 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 4)
454 [0, 1, 2]
454 [0, 1, 2]
455 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 2, 4)
455 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 2, 4)
456 [2]
456 [2]
457 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3)
457 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3)
458 [3, 4, 5]
458 [3, 4, 5]
459 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3, 5)
459 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3, 5)
460 [3, 4]
460 [3, 4]
461
461
462 Discontiguous cases:
462 Discontiguous cases:
463 >>> _trimchunk(revlog, [1, 3, 5, 6], 0)
463 >>> _trimchunk(revlog, [1, 3, 5, 6], 0)
464 [1, 3, 5]
464 [1, 3, 5]
465 >>> _trimchunk(revlog, [1, 3, 5, 6], 0, 2)
465 >>> _trimchunk(revlog, [1, 3, 5, 6], 0, 2)
466 [1]
466 [1]
467 >>> _trimchunk(revlog, [1, 3, 5, 6], 1, 3)
467 >>> _trimchunk(revlog, [1, 3, 5, 6], 1, 3)
468 [3, 5]
468 [3, 5]
469 >>> _trimchunk(revlog, [1, 3, 5, 6], 1)
469 >>> _trimchunk(revlog, [1, 3, 5, 6], 1)
470 [3, 5]
470 [3, 5]
471 """
471 """
472 length = revlog.length
472 length = revlog.length
473
473
474 if endidx is None:
474 if endidx is None:
475 endidx = len(revs)
475 endidx = len(revs)
476
476
477 # If we have a non-emtpy delta candidate, there are nothing to trim
477 # If we have a non-emtpy delta candidate, there are nothing to trim
478 if revs[endidx - 1] < len(revlog):
478 if revs[endidx - 1] < len(revlog):
479 # Trim empty revs at the end, except the very first revision of a chain
479 # Trim empty revs at the end, except the very first revision of a chain
480 while (
480 while (
481 endidx > 1 and endidx > startidx and length(revs[endidx - 1]) == 0
481 endidx > 1 and endidx > startidx and length(revs[endidx - 1]) == 0
482 ):
482 ):
483 endidx -= 1
483 endidx -= 1
484
484
485 return revs[startidx:endidx]
485 return revs[startidx:endidx]
486
486
487
487
def segmentspan(revlog, revs):
    """Get the byte span of a segment of revisions

    revs is a sorted array of revision numbers

    >>> revlog = _testrevlog([
    ...  5,  #0
    ...  10, #1
    ...  12, #2
    ...  12, #3 (empty)
    ...  17, #4
    ... ])

    >>> segmentspan(revlog, [0, 1, 2, 3, 4])
    17
    >>> segmentspan(revlog, [0, 4])
    17
    >>> segmentspan(revlog, [3, 4])
    5
    >>> segmentspan(revlog, [1, 2, 3,])
    7
    >>> segmentspan(revlog, [1, 3])
    7
    """
    # an empty segment spans no bytes
    if not revs:
        return 0
    # span is measured from the start of the first rev to the end of the last
    first, last = revs[0], revs[-1]
    return revlog.end(last) - revlog.start(first)
516
516
517
517
def _textfromdelta(fh, revlog, baserev, delta, p1, p2, flags, expectednode):
    """build full text from a (base, delta) pair and other metadata"""
    # special case deltas which replace entire base; no need to decode
    # base revision. this neatly avoids censored bases, which throw when
    # they're decoded.
    hlen = struct.calcsize(b">lll")
    fullreplace = mdiff.replacediffheader(
        revlog.rawsize(baserev), len(delta) - hlen
    )
    if delta[:hlen] == fullreplace:
        # the delta is "replace everything": its payload IS the full text
        fulltext = delta[hlen:]
    else:
        # deltabase is rawtext before changed by flag processors, which is
        # equivalent to non-raw text
        basetext = revlog.revision(baserev, _df=fh)
        fulltext = mdiff.patch(basetext, delta)

    try:
        validatehash = flagutil.processflagsraw(revlog, fulltext, flags)
        if validatehash:
            revlog.checkhash(fulltext, expectednode, p1=p1, p2=p2)
        if flags & REVIDX_ISCENSORED:
            # checkhash succeeded, so the content is NOT censored even
            # though the caller flagged it as such: refuse it
            raise error.StorageError(
                _(b'node %s is not censored') % expectednode
            )
    except error.CensoredNodeError:
        # must pass the censored index flag to add censored revisions
        if not flags & REVIDX_ISCENSORED:
            raise
    return fulltext
547
547
548
548
@attr.s(slots=True, frozen=True)
class _deltainfo:
    """Immutable description of a computed delta candidate."""

    # distance from the base revision; bounds the amount of I/O needed to
    # read the chain (see ``isgooddeltainfo``)
    distance = attr.ib()
    # length of the stored delta (compared against the fulltext length)
    deltalen = attr.ib()
    # (compression-header, payload) pair (see ``drop_u_compression``)
    data = attr.ib()
    # revision number the delta is stored against
    base = attr.ib()
    # base revision of the resulting delta chain
    chainbase = attr.ib()
    # number of deltas in the chain (compared against ``_maxchainlen``)
    chainlen = attr.ib()
    # cumulated size of the deltas that must be applied to restore the text
    compresseddeltalen = attr.ib()
    # snapshot depth, or None when this delta is not a snapshot
    snapshotdepth = attr.ib()
559
559
560
560
def drop_u_compression(delta):
    """Strip the ``b'u'`` (stored-uncompressed) marker from a delta.

    Returns an equivalent ``_deltainfo`` whose data carries no compression
    header.  This is useful for revlog formats that have a better
    compression method.
    """
    header, payload = delta.data
    assert header == b'u', header
    return _deltainfo(
        distance=delta.distance,
        # one byte shorter: the b'u' marker is gone
        deltalen=delta.deltalen - 1,
        data=(b'', payload),
        base=delta.base,
        chainbase=delta.chainbase,
        chainlen=delta.chainlen,
        compresseddeltalen=delta.compresseddeltalen,
        snapshotdepth=delta.snapshotdepth,
    )
577
577
578
578
def isgooddeltainfo(revlog, deltainfo, revinfo):
    """Returns True if the given delta is good. Good means that it is within
    the disk span, disk size, and chain length bounds that we know to be
    performant.

    revlog:    the revlog the delta would be stored in
    deltainfo: a ``_deltainfo`` instance, or None (never good)
    revinfo:   revision information; only ``textlen`` is consulted here
    """
    if deltainfo is None:
        return False

    # - 'deltainfo.distance' is the distance from the base revision --
    #   bounding it limits the amount of I/O we need to do.
    # - 'deltainfo.compresseddeltalen' is the sum of the total size of
    #   deltas we need to apply -- bounding it limits the amount of CPU
    #   we consume.

    textlen = revinfo.textlen
    defaultmax = textlen * 4
    maxdist = revlog._maxdeltachainspan
    if not maxdist:
        maxdist = deltainfo.distance  # ensure the conditional pass
    maxdist = max(maxdist, defaultmax)

    # Bad delta from read span:
    #
    #   If the span of data read is larger than the maximum allowed.
    #
    #   In the sparse-revlog case, we rely on the associated "sparse reading"
    #   to avoid issue related to the span of data. In theory, it would be
    #   possible to build pathological revlog where delta pattern would lead
    #   to too many reads. However, they do not happen in practice at all. So
    #   we skip the span check entirely.
    if not revlog._sparserevlog and maxdist < deltainfo.distance:
        return False

    # Bad delta from new delta size:
    #
    #   If the delta size is larger than the target text, storing the
    #   delta will be inefficient.
    if textlen < deltainfo.deltalen:
        return False

    # Bad delta from cumulated payload size:
    #
    #   If the sum of delta gets larger than K * target text length.
    if textlen * LIMIT_DELTA2TEXT < deltainfo.compresseddeltalen:
        return False

    # Bad delta from chain length:
    #
    #   If the number of delta in the chain gets too high.
    if revlog._maxchainlen and revlog._maxchainlen < deltainfo.chainlen:
        return False

    # bad delta from intermediate snapshot size limit
    #
    #   If an intermediate snapshot size is higher than the limit.  The
    #   limit exist to prevent endless chain of intermediate delta to be
    #   created.
    if (
        deltainfo.snapshotdepth is not None
        and (textlen >> deltainfo.snapshotdepth) < deltainfo.deltalen
    ):
        return False

    # bad delta if new intermediate snapshot is larger than the previous
    # snapshot
    if (
        deltainfo.snapshotdepth
        and revlog.length(deltainfo.base) < deltainfo.deltalen
    ):
        return False

    return True
650
650
651
651
# If a revision's full text is that much bigger than a base candidate full
# text's, it is very unlikely that it will produce a valid delta. We no longer
# consider these candidates.
LIMIT_BASE2TEXT = 500
656
656
657
657
def _candidategroups(revlog, textlen, p1, p2, cachedelta):
    """Provides group of revision to be tested as delta base

    This top level function focus on emitting groups with unique and worthwhile
    content. See _rawgroups for details about the group order.

    This is a generator-coroutine: each yielded value is a tuple of
    candidate base revisions, and the caller ``send()``s back the best base
    found so far (or None), which drives the refinement done by
    ``_refinedgroups``.  A final ``None`` is yielded when no candidates
    remain.
    """
    # should we try to build a delta?
    if not (len(revlog) and revlog._storedeltachains):
        yield None
        return

    deltalength = revlog.length
    deltaparent = revlog.deltaparent
    sparse = revlog._sparserevlog
    good = None

    deltas_limit = textlen * LIMIT_DELTA2TEXT

    tested = {nullrev}
    candidates = _refinedgroups(revlog, p1, p2, cachedelta)
    while True:
        temptative = candidates.send(good)
        if temptative is None:
            break
        group = []
        for rev in temptative:
            # skip over empty delta (no need to include them in a chain)
            while revlog._generaldelta and not (
                rev == nullrev or rev in tested or deltalength(rev)
            ):
                tested.add(rev)
                rev = deltaparent(rev)
            # no need to try a delta against nullrev, this will be done as a
            # last resort.
            if rev == nullrev:
                continue
            # filter out revision we tested already
            if rev in tested:
                continue
            tested.add(rev)
            # filter out delta base that will never produce good delta
            if deltas_limit < revlog.length(rev):
                continue
            if sparse and revlog.rawsize(rev) < (textlen // LIMIT_BASE2TEXT):
                continue
            # no delta for rawtext-changing revs (see "candelta" for why)
            if revlog.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
                continue
            # If we reach here, we are about to build and test a delta.
            # The delta building process will compute the chaininfo in all
            # case, since that computation is cached, it is fine to access it
            # here too.
            chainlen, chainsize = revlog._chaininfo(rev)
            # if chain will be too long, skip base
            if revlog._maxchainlen and chainlen >= revlog._maxchainlen:
                continue
            # if chain already have too much data, skip base
            if deltas_limit < chainsize:
                continue
            if sparse and revlog.upperboundcomp is not None:
                maxcomp = revlog.upperboundcomp
                basenotsnap = (p1, p2, nullrev)
                if rev not in basenotsnap and revlog.issnapshot(rev):
                    snapshotdepth = revlog.snapshotdepth(rev)
                    # If text is significantly larger than the base, we can
                    # expect the resulting delta to be proportional to the size
                    # difference
                    revsize = revlog.rawsize(rev)
                    rawsizedistance = max(textlen - revsize, 0)
                    # use an estimate of the compression upper bound.
                    lowestrealisticdeltalen = rawsizedistance // maxcomp

                    # check the absolute constraint on the delta size
                    snapshotlimit = textlen >> snapshotdepth
                    if snapshotlimit < lowestrealisticdeltalen:
                        # delta lower bound is larger than accepted upper bound
                        continue

                    # check the relative constraint on the delta size
                    revlength = revlog.length(rev)
                    if revlength < lowestrealisticdeltalen:
                        # delta probable lower bound is larger than target base
                        continue

            group.append(rev)
        if group:
            # XXX: in the sparse revlog case, group can become large,
            # impacting performances. Some bounding or slicing mechanism
            # would help to reduce this impact.
            good = yield tuple(group)
    yield None
749
749
750
750
def _findsnapshots(revlog, cache, start_rev):
    """find snapshot from start_rev to tip

    Results are accumulated into ``cache``, a mapping from delta-parent
    revision to the list of snapshot revisions based on it.
    """
    # fast path: a native implementation may be provided by the index
    if util.safehasattr(revlog.index, b'findsnapshots'):
        revlog.index.findsnapshots(cache, start_rev)
        return
    # slow path: walk every revision from start_rev to tip in Python
    is_snapshot = revlog.issnapshot
    parent_of = revlog.deltaparent
    for candidate in revlog.revs(start_rev):
        if is_snapshot(candidate):
            cache[parent_of(candidate)].append(candidate)
761
761
762
762
def _refinedgroups(revlog, p1, p2, cachedelta):
    """Emit candidate-base groups, refining the search around any base the
    caller accepts.

    Generator-coroutine: the caller ``send()``s back the chosen good base
    (or None) after each yielded group; a final None marks exhaustion.
    """
    good = None
    # First we try to reuse the delta contained in the bundle.
    # (or from the source revlog)
    #
    # This logic only applies to general delta repositories and can be disabled
    # through configuration. Disabling reuse source delta is useful when
    # we want to make sure we recomputed "optimal" deltas.
    if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
        # Assume what we received from the server is a good choice
        # build delta will reuse the cache
        good = yield (cachedelta[0],)
        if good is not None:
            yield None
            return
    snapshots = collections.defaultdict(list)
    for candidates in _rawgroups(revlog, p1, p2, cachedelta, snapshots):
        good = yield candidates
        if good is not None:
            break

    # If sparse revlog is enabled, we can try to refine the available deltas
    if not revlog._sparserevlog:
        yield None
        return

    # if we have a refinable value, try to refine it
    if good is not None and good not in (p1, p2) and revlog.issnapshot(good):
        # refine snapshot down: walk the accepted snapshot's delta-parent
        # chain, offering each ancestor base in turn
        previous = None
        while previous != good:
            previous = good
            base = revlog.deltaparent(good)
            if base == nullrev:
                break
            good = yield (base,)
        # refine snapshot up: offer the snapshots based on the accepted one
        if not snapshots:
            _findsnapshots(revlog, snapshots, good + 1)
        previous = None
        while good != previous:
            previous = good
            children = tuple(sorted(c for c in snapshots[good]))
            good = yield children

    # we have found nothing
    yield None
810
810
811
811
def _rawgroups(revlog, p1, p2, cachedelta, snapshots=None):
    """Provides group of revision to be tested as delta base

    This lower level function focus on emitting delta theoretically
    interesting without looking at any practical details.

    The group order aims at providing fast or small candidates first.

    ``snapshots`` may be a pre-populated mapping from base revision to the
    snapshots stored against it; when None a fresh one is built on demand.
    """
    gdelta = revlog._generaldelta
    # gate sparse behind general-delta because of issue6056
    sparse = gdelta and revlog._sparserevlog
    curr = len(revlog)
    prev = curr - 1
    deltachain = lambda rev: revlog._deltachain(rev)[0]

    if gdelta:
        # exclude already lazy tested base if any
        parents = [p for p in (p1, p2) if p != nullrev]

        if not revlog._deltabothparents and len(parents) == 2:
            parents.sort()
            # To minimize the chance of having to build a fulltext,
            # pick first whichever parent is closest to us (max rev)
            yield (parents[1],)
            # then the other one (min rev) if the first did not fit
            yield (parents[0],)
        elif len(parents) > 0:
            # Test all parents (1 or 2), and keep the best candidate
            yield parents

    if sparse and parents:
        if snapshots is None:
            # map: base-rev: snapshot-rev
            snapshots = collections.defaultdict(list)
        # See if we can use an existing snapshot in the parent chains to use as
        # a base for a new intermediate-snapshot
        #
        # search for snapshot in parents delta chain
        # map: snapshot-level: snapshot-rev
        parents_snaps = collections.defaultdict(set)
        candidate_chains = [deltachain(p) for p in parents]
        for chain in candidate_chains:
            for idx, s in enumerate(chain):
                if not revlog.issnapshot(s):
                    break
                parents_snaps[idx].add(s)
        snapfloor = min(parents_snaps[0]) + 1
        _findsnapshots(revlog, snapshots, snapfloor)
        # search for the highest "unrelated" revision
        #
        # Adding snapshots used by "unrelated" revision increase the odds we
        # reuse an independent, yet better snapshot chain.
        #
        # XXX instead of building a set of revisions, we could lazily enumerate
        # over the chains. That would be more efficient, however we stick to
        # simple code for now.
        all_revs = set()
        for chain in candidate_chains:
            all_revs.update(chain)
        other = None
        for r in revlog.revs(prev, snapfloor):
            if r not in all_revs:
                other = r
                break
        if other is not None:
            # To avoid unfair competition, we won't use unrelated intermediate
            # snapshot that are deeper than the ones from the parent delta
            # chain.
            max_depth = max(parents_snaps.keys())
            chain = deltachain(other)
            for idx, s in enumerate(chain):
                if s < snapfloor:
                    continue
                if max_depth < idx:
                    break
                if not revlog.issnapshot(s):
                    break
                parents_snaps[idx].add(s)
        # Test them as possible intermediate snapshot base
        # We test them from highest to lowest level. High level one are more
        # likely to result in small delta
        floor = None
        for idx, snaps in sorted(parents_snaps.items(), reverse=True):
            siblings = set()
            for s in snaps:
                siblings.update(snapshots[s])
            # Before considering making a new intermediate snapshot, we check
            # if an existing snapshot, children of base we consider, would be
            # suitable.
            #
            # It gives a chance to reuse a delta chain "unrelated" to the
            # current revision instead of starting our own. Without such
            # re-use, topological branches would keep reopening new chains.
            # Creating more and more snapshot as the repository grows.

            if floor is not None:
                # We only do this for siblings created after the one in our
                # parent's delta chain. Those created before has less chances
                # to be valid base since our ancestors had to create a new
                # snapshot.
                siblings = [r for r in siblings if floor < r]
            yield tuple(sorted(siblings))
            # then test the base from our parent's delta chain.
            yield tuple(sorted(snaps))
            floor = min(snaps)
        # No suitable base found in the parent chain, search if any full
        # snapshots emitted since parent's base would be a suitable base for an
        # intermediate snapshot.
        #
        # It gives a chance to reuse a delta chain unrelated to the current
        # revisions instead of starting our own. Without such re-use,
        # topological branches would keep reopening new full chains. Creating
        # more and more snapshot as the repository grows.
        yield tuple(snapshots[nullrev])

    if not sparse:
        # other approach failed try against prev to hopefully save us a
        # fulltext.
        yield (prev,)
931
931
932
932
933 class deltacomputer:
933 class deltacomputer:
    def __init__(self, revlog, write_debug=None, debug_search=False):
        """Initialize a delta computer.

        revlog:       the revlog deltas are computed for
        write_debug:  optional callable receiving debug messages (bytes)
        debug_search: when True (and ``write_debug`` is set), emit
                      "DBG-DELTAS-SEARCH" information during delta search
        """
        self.revlog = revlog
        self._write_debug = write_debug
        self._debug_search = debug_search
938
938
    def buildtext(self, revinfo, fh):
        """Builds a fulltext version of a revision

        revinfo: revisioninfo instance that contains all needed info
        fh:      file handle to either the .i or the .d revlog file,
                 depending on whether it is inlined or not

        The result is cached in ``revinfo.btext[0]``.  When no cached text
        is present, ``revinfo.cachedelta`` must be set: the text is rebuilt
        from that (base, delta) pair via ``_textfromdelta``.
        """
        btext = revinfo.btext
        if btext[0] is not None:
            return btext[0]

        revlog = self.revlog
        cachedelta = revinfo.cachedelta
        baserev = cachedelta[0]
        delta = cachedelta[1]

        fulltext = btext[0] = _textfromdelta(
            fh,
            revlog,
            baserev,
            delta,
            revinfo.p1,
            revinfo.p2,
            revinfo.flags,
            revinfo.node,
        )
        return fulltext
966
966
967 def _builddeltadiff(self, base, revinfo, fh):
967 def _builddeltadiff(self, base, revinfo, fh):
968 revlog = self.revlog
968 revlog = self.revlog
969 t = self.buildtext(revinfo, fh)
969 t = self.buildtext(revinfo, fh)
970 if revlog.iscensored(base):
970 if revlog.iscensored(base):
971 # deltas based on a censored revision must replace the
971 # deltas based on a censored revision must replace the
972 # full content in one patch, so delta works everywhere
972 # full content in one patch, so delta works everywhere
973 header = mdiff.replacediffheader(revlog.rawsize(base), len(t))
973 header = mdiff.replacediffheader(revlog.rawsize(base), len(t))
974 delta = header + t
974 delta = header + t
975 else:
975 else:
976 ptext = revlog.rawdata(base, _df=fh)
976 ptext = revlog.rawdata(base, _df=fh)
977 delta = mdiff.textdiff(ptext, t)
977 delta = mdiff.textdiff(ptext, t)
978
978
979 return delta
979 return delta
980
980
981 def _builddeltainfo(self, revinfo, base, fh):
981 def _builddeltainfo(self, revinfo, base, fh):
982 # can we use the cached delta?
982 # can we use the cached delta?
983 revlog = self.revlog
983 revlog = self.revlog
984 debug_search = self._write_debug is not None and self._debug_search
984 debug_search = self._write_debug is not None and self._debug_search
985 chainbase = revlog.chainbase(base)
985 chainbase = revlog.chainbase(base)
986 if revlog._generaldelta:
986 if revlog._generaldelta:
987 deltabase = base
987 deltabase = base
988 else:
988 else:
989 deltabase = chainbase
989 deltabase = chainbase
990 snapshotdepth = None
990 snapshotdepth = None
991 if revlog._sparserevlog and deltabase == nullrev:
991 if revlog._sparserevlog and deltabase == nullrev:
992 snapshotdepth = 0
992 snapshotdepth = 0
993 elif revlog._sparserevlog and revlog.issnapshot(deltabase):
993 elif revlog._sparserevlog and revlog.issnapshot(deltabase):
994 # A delta chain should always be one full snapshot,
994 # A delta chain should always be one full snapshot,
995 # zero or more semi-snapshots, and zero or more deltas
995 # zero or more semi-snapshots, and zero or more deltas
996 p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
996 p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
997 if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
997 if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
998 snapshotdepth = len(revlog._deltachain(deltabase)[0])
998 snapshotdepth = len(revlog._deltachain(deltabase)[0])
999 delta = None
999 delta = None
1000 if revinfo.cachedelta:
1000 if revinfo.cachedelta:
1001 cachebase, cachediff = revinfo.cachedelta
1001 cachebase, cachediff = revinfo.cachedelta
1002 # check if the diff still apply
1002 # check if the diff still apply
1003 currentbase = cachebase
1003 currentbase = cachebase
1004 while (
1004 while (
1005 currentbase != nullrev
1005 currentbase != nullrev
1006 and currentbase != base
1006 and currentbase != base
1007 and self.revlog.length(currentbase) == 0
1007 and self.revlog.length(currentbase) == 0
1008 ):
1008 ):
1009 currentbase = self.revlog.deltaparent(currentbase)
1009 currentbase = self.revlog.deltaparent(currentbase)
1010 if self.revlog._lazydelta and currentbase == base:
1010 if self.revlog._lazydelta and currentbase == base:
1011 delta = revinfo.cachedelta[1]
1011 delta = revinfo.cachedelta[1]
1012 if delta is None:
1012 if delta is None:
1013 delta = self._builddeltadiff(base, revinfo, fh)
1013 delta = self._builddeltadiff(base, revinfo, fh)
1014 if debug_search:
1014 if debug_search:
1015 msg = b"DBG-DELTAS-SEARCH: uncompressed-delta-size=%d\n"
1015 msg = b"DBG-DELTAS-SEARCH: uncompressed-delta-size=%d\n"
1016 msg %= len(delta)
1016 msg %= len(delta)
1017 self._write_debug(msg)
1017 self._write_debug(msg)
1018 # snapshotdept need to be neither None nor 0 level snapshot
1018 # snapshotdept need to be neither None nor 0 level snapshot
1019 if revlog.upperboundcomp is not None and snapshotdepth:
1019 if revlog.upperboundcomp is not None and snapshotdepth:
1020 lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
1020 lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
1021 snapshotlimit = revinfo.textlen >> snapshotdepth
1021 snapshotlimit = revinfo.textlen >> snapshotdepth
1022 if debug_search:
1022 if debug_search:
1023 msg = b"DBG-DELTAS-SEARCH: projected-lower-size=%d\n"
1023 msg = b"DBG-DELTAS-SEARCH: projected-lower-size=%d\n"
1024 msg %= lowestrealisticdeltalen
1024 msg %= lowestrealisticdeltalen
1025 self._write_debug(msg)
1025 self._write_debug(msg)
1026 if snapshotlimit < lowestrealisticdeltalen:
1026 if snapshotlimit < lowestrealisticdeltalen:
1027 if debug_search:
1027 if debug_search:
1028 msg = b"DBG-DELTAS-SEARCH: DISCARDED (snapshot limit)\n"
1028 msg = b"DBG-DELTAS-SEARCH: DISCARDED (snapshot limit)\n"
1029 self._write_debug(msg)
1029 self._write_debug(msg)
1030 return None
1030 return None
1031 if revlog.length(base) < lowestrealisticdeltalen:
1031 if revlog.length(base) < lowestrealisticdeltalen:
1032 if debug_search:
1032 if debug_search:
1033 msg = b"DBG-DELTAS-SEARCH: DISCARDED (prev size)\n"
1033 msg = b"DBG-DELTAS-SEARCH: DISCARDED (prev size)\n"
1034 self._write_debug(msg)
1034 self._write_debug(msg)
1035 return None
1035 return None
1036 header, data = revlog.compress(delta)
1036 header, data = revlog.compress(delta)
1037 deltalen = len(header) + len(data)
1037 deltalen = len(header) + len(data)
1038 offset = revlog.end(len(revlog) - 1)
1038 offset = revlog.end(len(revlog) - 1)
1039 dist = deltalen + offset - revlog.start(chainbase)
1039 dist = deltalen + offset - revlog.start(chainbase)
1040 chainlen, compresseddeltalen = revlog._chaininfo(base)
1040 chainlen, compresseddeltalen = revlog._chaininfo(base)
1041 chainlen += 1
1041 chainlen += 1
1042 compresseddeltalen += deltalen
1042 compresseddeltalen += deltalen
1043
1043
1044 return _deltainfo(
1044 return _deltainfo(
1045 dist,
1045 dist,
1046 deltalen,
1046 deltalen,
1047 (header, data),
1047 (header, data),
1048 deltabase,
1048 deltabase,
1049 chainbase,
1049 chainbase,
1050 chainlen,
1050 chainlen,
1051 compresseddeltalen,
1051 compresseddeltalen,
1052 snapshotdepth,
1052 snapshotdepth,
1053 )
1053 )
1054
1054
1055 def _fullsnapshotinfo(self, fh, revinfo, curr):
1055 def _fullsnapshotinfo(self, fh, revinfo, curr):
1056 rawtext = self.buildtext(revinfo, fh)
1056 rawtext = self.buildtext(revinfo, fh)
1057 data = self.revlog.compress(rawtext)
1057 data = self.revlog.compress(rawtext)
1058 compresseddeltalen = deltalen = dist = len(data[1]) + len(data[0])
1058 compresseddeltalen = deltalen = dist = len(data[1]) + len(data[0])
1059 deltabase = chainbase = curr
1059 deltabase = chainbase = curr
1060 snapshotdepth = 0
1060 snapshotdepth = 0
1061 chainlen = 1
1061 chainlen = 1
1062
1062
1063 return _deltainfo(
1063 return _deltainfo(
1064 dist,
1064 dist,
1065 deltalen,
1065 deltalen,
1066 data,
1066 data,
1067 deltabase,
1067 deltabase,
1068 chainbase,
1068 chainbase,
1069 chainlen,
1069 chainlen,
1070 compresseddeltalen,
1070 compresseddeltalen,
1071 snapshotdepth,
1071 snapshotdepth,
1072 )
1072 )
1073
1073
1074 def finddeltainfo(self, revinfo, fh, excluded_bases=None, target_rev=None):
1074 def finddeltainfo(self, revinfo, fh, excluded_bases=None, target_rev=None):
1075 """Find an acceptable delta against a candidate revision
1075 """Find an acceptable delta against a candidate revision
1076
1076
1077 revinfo: information about the revision (instance of _revisioninfo)
1077 revinfo: information about the revision (instance of _revisioninfo)
1078 fh: file handle to either the .i or the .d revlog file,
1078 fh: file handle to either the .i or the .d revlog file,
1079 depending on whether it is inlined or not
1079 depending on whether it is inlined or not
1080
1080
1081 Returns the first acceptable candidate revision, as ordered by
1081 Returns the first acceptable candidate revision, as ordered by
1082 _candidategroups
1082 _candidategroups
1083
1083
1084 If no suitable deltabase is found, we return delta info for a full
1084 If no suitable deltabase is found, we return delta info for a full
1085 snapshot.
1085 snapshot.
1086
1086
1087 `excluded_bases` is an optional set of revision that cannot be used as
1087 `excluded_bases` is an optional set of revision that cannot be used as
1088 a delta base. Use this to recompute delta suitable in censor or strip
1088 a delta base. Use this to recompute delta suitable in censor or strip
1089 context.
1089 context.
1090 """
1090 """
1091 if target_rev is None:
1091 if target_rev is None:
1092 target_rev = len(self.revlog)
1092 target_rev = len(self.revlog)
1093
1093
1094 if not revinfo.textlen:
1094 if not revinfo.textlen:
1095 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1095 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1096
1096
1097 if excluded_bases is None:
1097 if excluded_bases is None:
1098 excluded_bases = set()
1098 excluded_bases = set()
1099
1099
1100 # no delta for flag processor revision (see "candelta" for why)
1100 # no delta for flag processor revision (see "candelta" for why)
1101 # not calling candelta since only one revision needs test, also to
1101 # not calling candelta since only one revision needs test, also to
1102 # avoid overhead fetching flags again.
1102 # avoid overhead fetching flags again.
1103 if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
1103 if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
1104 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1104 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1105
1105
1106 if self._write_debug is not None:
1106 if self._write_debug is not None:
1107 start = util.timer()
1107 start = util.timer()
1108
1108
1109 debug_search = self._write_debug is not None and self._debug_search
1109 debug_search = self._write_debug is not None and self._debug_search
1110
1110
1111 # count the number of different delta we tried (for debug purpose)
1111 # count the number of different delta we tried (for debug purpose)
1112 dbg_try_count = 0
1112 dbg_try_count = 0
1113 # count the number of "search round" we did. (for debug purpose)
1113 # count the number of "search round" we did. (for debug purpose)
1114 dbg_try_rounds = 0
1114 dbg_try_rounds = 0
1115 dbg_type = b'unknown'
1115 dbg_type = b'unknown'
1116
1116
1117 cachedelta = revinfo.cachedelta
1117 cachedelta = revinfo.cachedelta
1118 p1 = revinfo.p1
1118 p1 = revinfo.p1
1119 p2 = revinfo.p2
1119 p2 = revinfo.p2
1120 revlog = self.revlog
1120 revlog = self.revlog
1121
1121
1122 deltainfo = None
1122 deltainfo = None
1123 p1r, p2r = revlog.rev(p1), revlog.rev(p2)
1123 p1r, p2r = revlog.rev(p1), revlog.rev(p2)
1124
1124
1125 if self._write_debug is not None:
1125 if self._write_debug is not None:
1126 if p1r != nullrev:
1126 if p1r != nullrev:
1127 p1_chain_len = revlog._chaininfo(p1r)[0]
1127 p1_chain_len = revlog._chaininfo(p1r)[0]
1128 else:
1128 else:
1129 p1_chain_len = -1
1129 p1_chain_len = -1
1130 if p2r != nullrev:
1130 if p2r != nullrev:
1131 p2_chain_len = revlog._chaininfo(p2r)[0]
1131 p2_chain_len = revlog._chaininfo(p2r)[0]
1132 else:
1132 else:
1133 p2_chain_len = -1
1133 p2_chain_len = -1
1134 if debug_search:
1134 if debug_search:
1135 msg = b"DBG-DELTAS-SEARCH: SEARCH rev=%d\n"
1135 msg = b"DBG-DELTAS-SEARCH: SEARCH rev=%d\n"
1136 msg %= target_rev
1136 msg %= target_rev
1137 self._write_debug(msg)
1137 self._write_debug(msg)
1138
1138
1139 groups = _candidategroups(
1139 groups = _candidategroups(
1140 self.revlog, revinfo.textlen, p1r, p2r, cachedelta
1140 self.revlog, revinfo.textlen, p1r, p2r, cachedelta
1141 )
1141 )
1142 candidaterevs = next(groups)
1142 candidaterevs = next(groups)
1143 while candidaterevs is not None:
1143 while candidaterevs is not None:
1144 dbg_try_rounds += 1
1144 dbg_try_rounds += 1
1145 if debug_search:
1145 if debug_search:
1146 prev = None
1146 prev = None
1147 if deltainfo is not None:
1147 if deltainfo is not None:
1148 prev = deltainfo.base
1148 prev = deltainfo.base
1149
1149
1150 if p1 in candidaterevs or p2 in candidaterevs:
1150 if (
1151 cachedelta is not None
1152 and len(candidaterevs) == 1
1153 and cachedelta[0] in candidaterevs
1154 ):
1155 round_type = b"cached-delta"
1156 elif p1 in candidaterevs or p2 in candidaterevs:
1151 round_type = b"parents"
1157 round_type = b"parents"
1152 elif prev is not None and all(c < prev for c in candidaterevs):
1158 elif prev is not None and all(c < prev for c in candidaterevs):
1153 round_type = b"refine-down"
1159 round_type = b"refine-down"
1154 elif prev is not None and all(c > prev for c in candidaterevs):
1160 elif prev is not None and all(c > prev for c in candidaterevs):
1155 round_type = b"refine-up"
1161 round_type = b"refine-up"
1156 else:
1162 else:
1157 round_type = b"search-down"
1163 round_type = b"search-down"
1158 msg = b"DBG-DELTAS-SEARCH: ROUND #%d - %d candidates - %s\n"
1164 msg = b"DBG-DELTAS-SEARCH: ROUND #%d - %d candidates - %s\n"
1159 msg %= (dbg_try_rounds, len(candidaterevs), round_type)
1165 msg %= (dbg_try_rounds, len(candidaterevs), round_type)
1160 self._write_debug(msg)
1166 self._write_debug(msg)
1161 nominateddeltas = []
1167 nominateddeltas = []
1162 if deltainfo is not None:
1168 if deltainfo is not None:
1163 if debug_search:
1169 if debug_search:
1164 msg = (
1170 msg = (
1165 b"DBG-DELTAS-SEARCH: CONTENDER: rev=%d - length=%d\n"
1171 b"DBG-DELTAS-SEARCH: CONTENDER: rev=%d - length=%d\n"
1166 )
1172 )
1167 msg %= (deltainfo.base, deltainfo.deltalen)
1173 msg %= (deltainfo.base, deltainfo.deltalen)
1168 self._write_debug(msg)
1174 self._write_debug(msg)
1169 # if we already found a good delta,
1175 # if we already found a good delta,
1170 # challenge it against refined candidates
1176 # challenge it against refined candidates
1171 nominateddeltas.append(deltainfo)
1177 nominateddeltas.append(deltainfo)
1172 for candidaterev in candidaterevs:
1178 for candidaterev in candidaterevs:
1173 if debug_search:
1179 if debug_search:
1174 msg = b"DBG-DELTAS-SEARCH: CANDIDATE: rev=%d\n"
1180 msg = b"DBG-DELTAS-SEARCH: CANDIDATE: rev=%d\n"
1175 msg %= candidaterev
1181 msg %= candidaterev
1176 self._write_debug(msg)
1182 self._write_debug(msg)
1177 candidate_type = None
1183 candidate_type = None
1178 if candidaterev == p1:
1184 if candidaterev == p1:
1179 candidate_type = b"p1"
1185 candidate_type = b"p1"
1180 elif candidaterev == p2:
1186 elif candidaterev == p2:
1181 candidate_type = b"p2"
1187 candidate_type = b"p2"
1182 elif self.revlog.issnapshot(candidaterev):
1188 elif self.revlog.issnapshot(candidaterev):
1183 candidate_type = b"snapshot-%d"
1189 candidate_type = b"snapshot-%d"
1184 candidate_type %= self.revlog.snapshotdepth(
1190 candidate_type %= self.revlog.snapshotdepth(
1185 candidaterev
1191 candidaterev
1186 )
1192 )
1187
1193
1188 if candidate_type is not None:
1194 if candidate_type is not None:
1189 msg = b"DBG-DELTAS-SEARCH: type=%s\n"
1195 msg = b"DBG-DELTAS-SEARCH: type=%s\n"
1190 msg %= candidate_type
1196 msg %= candidate_type
1191 self._write_debug(msg)
1197 self._write_debug(msg)
1192 msg = b"DBG-DELTAS-SEARCH: size=%d\n"
1198 msg = b"DBG-DELTAS-SEARCH: size=%d\n"
1193 msg %= self.revlog.length(candidaterev)
1199 msg %= self.revlog.length(candidaterev)
1194 self._write_debug(msg)
1200 self._write_debug(msg)
1195 msg = b"DBG-DELTAS-SEARCH: base=%d\n"
1201 msg = b"DBG-DELTAS-SEARCH: base=%d\n"
1196 msg %= self.revlog.deltaparent(candidaterev)
1202 msg %= self.revlog.deltaparent(candidaterev)
1197 self._write_debug(msg)
1203 self._write_debug(msg)
1198 if candidaterev in excluded_bases:
1204 if candidaterev in excluded_bases:
1199 if debug_search:
1205 if debug_search:
1200 msg = b"DBG-DELTAS-SEARCH: EXCLUDED\n"
1206 msg = b"DBG-DELTAS-SEARCH: EXCLUDED\n"
1201 self._write_debug(msg)
1207 self._write_debug(msg)
1202 continue
1208 continue
1203 if candidaterev >= target_rev:
1209 if candidaterev >= target_rev:
1204 if debug_search:
1210 if debug_search:
1205 msg = b"DBG-DELTAS-SEARCH: TOO-HIGH\n"
1211 msg = b"DBG-DELTAS-SEARCH: TOO-HIGH\n"
1206 self._write_debug(msg)
1212 self._write_debug(msg)
1207 continue
1213 continue
1208 dbg_try_count += 1
1214 dbg_try_count += 1
1209
1215
1210 if debug_search:
1216 if debug_search:
1211 delta_start = util.timer()
1217 delta_start = util.timer()
1212 candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
1218 candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
1213 if debug_search:
1219 if debug_search:
1214 delta_end = util.timer()
1220 delta_end = util.timer()
1215 msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
1221 msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
1216 msg %= delta_end - delta_start
1222 msg %= delta_end - delta_start
1217 self._write_debug(msg)
1223 self._write_debug(msg)
1218 if candidatedelta is not None:
1224 if candidatedelta is not None:
1219 if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
1225 if isgooddeltainfo(self.revlog, candidatedelta, revinfo):
1220 if debug_search:
1226 if debug_search:
1221 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
1227 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
1222 msg %= candidatedelta.deltalen
1228 msg %= candidatedelta.deltalen
1223 self._write_debug(msg)
1229 self._write_debug(msg)
1224 nominateddeltas.append(candidatedelta)
1230 nominateddeltas.append(candidatedelta)
1225 elif debug_search:
1231 elif debug_search:
1226 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (BAD)\n"
1232 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (BAD)\n"
1227 msg %= candidatedelta.deltalen
1233 msg %= candidatedelta.deltalen
1228 self._write_debug(msg)
1234 self._write_debug(msg)
1229 elif debug_search:
1235 elif debug_search:
1230 msg = b"DBG-DELTAS-SEARCH: NO-DELTA\n"
1236 msg = b"DBG-DELTAS-SEARCH: NO-DELTA\n"
1231 self._write_debug(msg)
1237 self._write_debug(msg)
1232 if nominateddeltas:
1238 if nominateddeltas:
1233 deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
1239 deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
1234 if deltainfo is not None:
1240 if deltainfo is not None:
1235 candidaterevs = groups.send(deltainfo.base)
1241 candidaterevs = groups.send(deltainfo.base)
1236 else:
1242 else:
1237 candidaterevs = next(groups)
1243 candidaterevs = next(groups)
1238
1244
1239 if deltainfo is None:
1245 if deltainfo is None:
1240 dbg_type = b"full"
1246 dbg_type = b"full"
1241 deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
1247 deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
1242 elif deltainfo.snapshotdepth: # pytype: disable=attribute-error
1248 elif deltainfo.snapshotdepth: # pytype: disable=attribute-error
1243 dbg_type = b"snapshot"
1249 dbg_type = b"snapshot"
1244 else:
1250 else:
1245 dbg_type = b"delta"
1251 dbg_type = b"delta"
1246
1252
1247 if self._write_debug is not None:
1253 if self._write_debug is not None:
1248 end = util.timer()
1254 end = util.timer()
1255 assert deltainfo is not None # please pytype
1256 used_cached = (
1257 cachedelta is not None
1258 and dbg_try_rounds == 1
1259 and dbg_try_count == 1
1260 and deltainfo.base == cachedelta[0]
1261 )
1249 dbg = {
1262 dbg = {
1250 'duration': end - start,
1263 'duration': end - start,
1251 'revision': target_rev,
1264 'revision': target_rev,
1265 'delta-base': deltainfo.base,
1252 'search_round_count': dbg_try_rounds,
1266 'search_round_count': dbg_try_rounds,
1267 'using-cached-base': used_cached,
1253 'delta_try_count': dbg_try_count,
1268 'delta_try_count': dbg_try_count,
1254 'type': dbg_type,
1269 'type': dbg_type,
1255 'p1-chain-len': p1_chain_len,
1270 'p1-chain-len': p1_chain_len,
1256 'p2-chain-len': p2_chain_len,
1271 'p2-chain-len': p2_chain_len,
1257 }
1272 }
1258 if (
1273 if (
1259 deltainfo.snapshotdepth # pytype: disable=attribute-error
1274 deltainfo.snapshotdepth # pytype: disable=attribute-error
1260 is not None
1275 is not None
1261 ):
1276 ):
1262 dbg[
1277 dbg[
1263 'snapshot-depth'
1278 'snapshot-depth'
1264 ] = deltainfo.snapshotdepth # pytype: disable=attribute-error
1279 ] = deltainfo.snapshotdepth # pytype: disable=attribute-error
1265 else:
1280 else:
1266 dbg['snapshot-depth'] = 0
1281 dbg['snapshot-depth'] = 0
1267 target_revlog = b"UNKNOWN"
1282 target_revlog = b"UNKNOWN"
1268 target_type = self.revlog.target[0]
1283 target_type = self.revlog.target[0]
1269 target_key = self.revlog.target[1]
1284 target_key = self.revlog.target[1]
1270 if target_type == KIND_CHANGELOG:
1285 if target_type == KIND_CHANGELOG:
1271 target_revlog = b'CHANGELOG:'
1286 target_revlog = b'CHANGELOG:'
1272 elif target_type == KIND_MANIFESTLOG:
1287 elif target_type == KIND_MANIFESTLOG:
1273 target_revlog = b'MANIFESTLOG:'
1288 target_revlog = b'MANIFESTLOG:'
1274 if target_key:
1289 if target_key:
1275 target_revlog += b'%s:' % target_key
1290 target_revlog += b'%s:' % target_key
1276 elif target_type == KIND_FILELOG:
1291 elif target_type == KIND_FILELOG:
1277 target_revlog = b'FILELOG:'
1292 target_revlog = b'FILELOG:'
1278 if target_key:
1293 if target_key:
1279 target_revlog += b'%s:' % target_key
1294 target_revlog += b'%s:' % target_key
1280 dbg['target-revlog'] = target_revlog
1295 dbg['target-revlog'] = target_revlog
1281
1296
1282 msg = (
1297 msg = (
1283 b"DBG-DELTAS:"
1298 b"DBG-DELTAS:"
1284 b" %-12s"
1299 b" %-12s"
1285 b" rev=%d:"
1300 b" rev=%d:"
1286 b" search-rounds=%d"
1301 b" delta-base=%d"
1302 b" is-cached=%d"
1303 b" - search-rounds=%d"
1287 b" try-count=%d"
1304 b" try-count=%d"
1288 b" - delta-type=%-6s"
1305 b" - delta-type=%-6s"
1289 b" snap-depth=%d"
1306 b" snap-depth=%d"
1290 b" - p1-chain-length=%d"
1307 b" - p1-chain-length=%d"
1291 b" p2-chain-length=%d"
1308 b" p2-chain-length=%d"
1292 b" - duration=%f"
1309 b" - duration=%f"
1293 b"\n"
1310 b"\n"
1294 )
1311 )
1295 msg %= (
1312 msg %= (
1296 dbg["target-revlog"],
1313 dbg["target-revlog"],
1297 dbg["revision"],
1314 dbg["revision"],
1315 dbg["delta-base"],
1316 dbg["using-cached-base"],
1298 dbg["search_round_count"],
1317 dbg["search_round_count"],
1299 dbg["delta_try_count"],
1318 dbg["delta_try_count"],
1300 dbg["type"],
1319 dbg["type"],
1301 dbg["snapshot-depth"],
1320 dbg["snapshot-depth"],
1302 dbg["p1-chain-len"],
1321 dbg["p1-chain-len"],
1303 dbg["p2-chain-len"],
1322 dbg["p2-chain-len"],
1304 dbg["duration"],
1323 dbg["duration"],
1305 )
1324 )
1306 self._write_debug(msg)
1325 self._write_debug(msg)
1307 return deltainfo
1326 return deltainfo
1308
1327
1309
1328
1310 def delta_compression(default_compression_header, deltainfo):
1329 def delta_compression(default_compression_header, deltainfo):
1311 """return (COMPRESSION_MODE, deltainfo)
1330 """return (COMPRESSION_MODE, deltainfo)
1312
1331
1313 used by revlog v2+ format to dispatch between PLAIN and DEFAULT
1332 used by revlog v2+ format to dispatch between PLAIN and DEFAULT
1314 compression.
1333 compression.
1315 """
1334 """
1316 h, d = deltainfo.data
1335 h, d = deltainfo.data
1317 compression_mode = COMP_MODE_INLINE
1336 compression_mode = COMP_MODE_INLINE
1318 if not h and not d:
1337 if not h and not d:
1319 # not data to store at all... declare them uncompressed
1338 # not data to store at all... declare them uncompressed
1320 compression_mode = COMP_MODE_PLAIN
1339 compression_mode = COMP_MODE_PLAIN
1321 elif not h:
1340 elif not h:
1322 t = d[0:1]
1341 t = d[0:1]
1323 if t == b'\0':
1342 if t == b'\0':
1324 compression_mode = COMP_MODE_PLAIN
1343 compression_mode = COMP_MODE_PLAIN
1325 elif t == default_compression_header:
1344 elif t == default_compression_header:
1326 compression_mode = COMP_MODE_DEFAULT
1345 compression_mode = COMP_MODE_DEFAULT
1327 elif h == b'u':
1346 elif h == b'u':
1328 # we have a more efficient way to declare uncompressed
1347 # we have a more efficient way to declare uncompressed
1329 h = b''
1348 h = b''
1330 compression_mode = COMP_MODE_PLAIN
1349 compression_mode = COMP_MODE_PLAIN
1331 deltainfo = drop_u_compression(deltainfo)
1350 deltainfo = drop_u_compression(deltainfo)
1332 return compression_mode, deltainfo
1351 return compression_mode, deltainfo
@@ -1,1065 +1,1065 b''
1 Setting up test
1 Setting up test
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo 0 > afile
5 $ echo 0 > afile
6 $ hg add afile
6 $ hg add afile
7 $ hg commit -m "0.0"
7 $ hg commit -m "0.0"
8 $ echo 1 >> afile
8 $ echo 1 >> afile
9 $ hg commit -m "0.1"
9 $ hg commit -m "0.1"
10 $ echo 2 >> afile
10 $ echo 2 >> afile
11 $ hg commit -m "0.2"
11 $ hg commit -m "0.2"
12 $ echo 3 >> afile
12 $ echo 3 >> afile
13 $ hg commit -m "0.3"
13 $ hg commit -m "0.3"
14 $ hg update -C 0
14 $ hg update -C 0
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
16 $ echo 1 >> afile
16 $ echo 1 >> afile
17 $ hg commit -m "1.1"
17 $ hg commit -m "1.1"
18 created new head
18 created new head
19 $ echo 2 >> afile
19 $ echo 2 >> afile
20 $ hg commit -m "1.2"
20 $ hg commit -m "1.2"
21 $ echo "a line" > fred
21 $ echo "a line" > fred
22 $ echo 3 >> afile
22 $ echo 3 >> afile
23 $ hg add fred
23 $ hg add fred
24 $ hg commit -m "1.3"
24 $ hg commit -m "1.3"
25 $ hg mv afile adifferentfile
25 $ hg mv afile adifferentfile
26 $ hg commit -m "1.3m"
26 $ hg commit -m "1.3m"
27 $ hg update -C 3
27 $ hg update -C 3
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
29 $ hg mv afile anotherfile
29 $ hg mv afile anotherfile
30 $ hg commit -m "0.3m"
30 $ hg commit -m "0.3m"
31 $ hg verify
31 $ hg verify
32 checking changesets
32 checking changesets
33 checking manifests
33 checking manifests
34 crosschecking files in changesets and manifests
34 crosschecking files in changesets and manifests
35 checking files
35 checking files
36 checked 9 changesets with 7 changes to 4 files
36 checked 9 changesets with 7 changes to 4 files
37 $ cd ..
37 $ cd ..
38 $ hg init empty
38 $ hg init empty
39
39
40 Bundle and phase
40 Bundle and phase
41
41
42 $ hg -R test phase --force --secret 0
42 $ hg -R test phase --force --secret 0
43 $ hg -R test bundle phase.hg empty
43 $ hg -R test bundle phase.hg empty
44 searching for changes
44 searching for changes
45 no changes found (ignored 9 secret changesets)
45 no changes found (ignored 9 secret changesets)
46 [1]
46 [1]
47 $ hg -R test phase --draft -r 'head()'
47 $ hg -R test phase --draft -r 'head()'
48
48
49 Bundle --all
49 Bundle --all
50
50
51 $ hg -R test bundle --all all.hg
51 $ hg -R test bundle --all all.hg
52 9 changesets found
52 9 changesets found
53
53
54 Bundle test to full.hg
54 Bundle test to full.hg
55
55
56 $ hg -R test bundle full.hg empty
56 $ hg -R test bundle full.hg empty
57 searching for changes
57 searching for changes
58 9 changesets found
58 9 changesets found
59
59
60 Unbundle full.hg in test
60 Unbundle full.hg in test
61
61
62 $ hg -R test unbundle full.hg
62 $ hg -R test unbundle full.hg
63 adding changesets
63 adding changesets
64 adding manifests
64 adding manifests
65 adding file changes
65 adding file changes
66 added 0 changesets with 0 changes to 4 files
66 added 0 changesets with 0 changes to 4 files
67 (run 'hg update' to get a working copy)
67 (run 'hg update' to get a working copy)
68
68
69 Verify empty
69 Verify empty
70
70
71 $ hg -R empty heads
71 $ hg -R empty heads
72 [1]
72 [1]
73 $ hg -R empty verify
73 $ hg -R empty verify
74 checking changesets
74 checking changesets
75 checking manifests
75 checking manifests
76 crosschecking files in changesets and manifests
76 crosschecking files in changesets and manifests
77 checking files
77 checking files
78 checked 0 changesets with 0 changes to 0 files
78 checked 0 changesets with 0 changes to 0 files
79
79
80 #if repobundlerepo
80 #if repobundlerepo
81
81
82 Pull full.hg into test (using --cwd)
82 Pull full.hg into test (using --cwd)
83
83
84 $ hg --cwd test pull ../full.hg
84 $ hg --cwd test pull ../full.hg
85 pulling from ../full.hg
85 pulling from ../full.hg
86 searching for changes
86 searching for changes
87 no changes found
87 no changes found
88
88
89 Verify that there are no leaked temporary files after pull (issue2797)
89 Verify that there are no leaked temporary files after pull (issue2797)
90
90
91 $ ls test/.hg | grep .hg10un
91 $ ls test/.hg | grep .hg10un
92 [1]
92 [1]
93
93
94 Pull full.hg into empty (using --cwd)
94 Pull full.hg into empty (using --cwd)
95
95
96 $ hg --cwd empty pull ../full.hg
96 $ hg --cwd empty pull ../full.hg
97 pulling from ../full.hg
97 pulling from ../full.hg
98 requesting all changes
98 requesting all changes
99 adding changesets
99 adding changesets
100 adding manifests
100 adding manifests
101 adding file changes
101 adding file changes
102 added 9 changesets with 7 changes to 4 files (+1 heads)
102 added 9 changesets with 7 changes to 4 files (+1 heads)
103 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
103 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
104 (run 'hg heads' to see heads, 'hg merge' to merge)
104 (run 'hg heads' to see heads, 'hg merge' to merge)
105
105
106 Rollback empty
106 Rollback empty
107
107
108 $ hg -R empty rollback
108 $ hg -R empty rollback
109 repository tip rolled back to revision -1 (undo pull)
109 repository tip rolled back to revision -1 (undo pull)
110
110
111 Pull full.hg into empty again (using --cwd)
111 Pull full.hg into empty again (using --cwd)
112
112
113 $ hg --cwd empty pull ../full.hg
113 $ hg --cwd empty pull ../full.hg
114 pulling from ../full.hg
114 pulling from ../full.hg
115 requesting all changes
115 requesting all changes
116 adding changesets
116 adding changesets
117 adding manifests
117 adding manifests
118 adding file changes
118 adding file changes
119 added 9 changesets with 7 changes to 4 files (+1 heads)
119 added 9 changesets with 7 changes to 4 files (+1 heads)
120 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
120 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
121 (run 'hg heads' to see heads, 'hg merge' to merge)
121 (run 'hg heads' to see heads, 'hg merge' to merge)
122
122
123 Pull full.hg into test (using -R)
123 Pull full.hg into test (using -R)
124
124
125 $ hg -R test pull full.hg
125 $ hg -R test pull full.hg
126 pulling from full.hg
126 pulling from full.hg
127 searching for changes
127 searching for changes
128 no changes found
128 no changes found
129
129
130 Pull full.hg into empty (using -R)
130 Pull full.hg into empty (using -R)
131
131
132 $ hg -R empty pull full.hg
132 $ hg -R empty pull full.hg
133 pulling from full.hg
133 pulling from full.hg
134 searching for changes
134 searching for changes
135 no changes found
135 no changes found
136
136
137 Rollback empty
137 Rollback empty
138
138
139 $ hg -R empty rollback
139 $ hg -R empty rollback
140 repository tip rolled back to revision -1 (undo pull)
140 repository tip rolled back to revision -1 (undo pull)
141
141
142 Pull full.hg into empty again (using -R)
142 Pull full.hg into empty again (using -R)
143
143
144 $ hg -R empty pull full.hg
144 $ hg -R empty pull full.hg
145 pulling from full.hg
145 pulling from full.hg
146 requesting all changes
146 requesting all changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 9 changesets with 7 changes to 4 files (+1 heads)
150 added 9 changesets with 7 changes to 4 files (+1 heads)
151 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
151 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
152 (run 'hg heads' to see heads, 'hg merge' to merge)
152 (run 'hg heads' to see heads, 'hg merge' to merge)
153
153
154 Log -R full.hg in fresh empty
154 Log -R full.hg in fresh empty
155
155
156 $ rm -r empty
156 $ rm -r empty
157 $ hg init empty
157 $ hg init empty
158 $ cd empty
158 $ cd empty
159 $ hg -R bundle://../full.hg log
159 $ hg -R bundle://../full.hg log
160 changeset: 8:aa35859c02ea
160 changeset: 8:aa35859c02ea
161 tag: tip
161 tag: tip
162 parent: 3:eebf5a27f8ca
162 parent: 3:eebf5a27f8ca
163 user: test
163 user: test
164 date: Thu Jan 01 00:00:00 1970 +0000
164 date: Thu Jan 01 00:00:00 1970 +0000
165 summary: 0.3m
165 summary: 0.3m
166
166
167 changeset: 7:a6a34bfa0076
167 changeset: 7:a6a34bfa0076
168 user: test
168 user: test
169 date: Thu Jan 01 00:00:00 1970 +0000
169 date: Thu Jan 01 00:00:00 1970 +0000
170 summary: 1.3m
170 summary: 1.3m
171
171
172 changeset: 6:7373c1169842
172 changeset: 6:7373c1169842
173 user: test
173 user: test
174 date: Thu Jan 01 00:00:00 1970 +0000
174 date: Thu Jan 01 00:00:00 1970 +0000
175 summary: 1.3
175 summary: 1.3
176
176
177 changeset: 5:1bb50a9436a7
177 changeset: 5:1bb50a9436a7
178 user: test
178 user: test
179 date: Thu Jan 01 00:00:00 1970 +0000
179 date: Thu Jan 01 00:00:00 1970 +0000
180 summary: 1.2
180 summary: 1.2
181
181
182 changeset: 4:095197eb4973
182 changeset: 4:095197eb4973
183 parent: 0:f9ee2f85a263
183 parent: 0:f9ee2f85a263
184 user: test
184 user: test
185 date: Thu Jan 01 00:00:00 1970 +0000
185 date: Thu Jan 01 00:00:00 1970 +0000
186 summary: 1.1
186 summary: 1.1
187
187
188 changeset: 3:eebf5a27f8ca
188 changeset: 3:eebf5a27f8ca
189 user: test
189 user: test
190 date: Thu Jan 01 00:00:00 1970 +0000
190 date: Thu Jan 01 00:00:00 1970 +0000
191 summary: 0.3
191 summary: 0.3
192
192
193 changeset: 2:e38ba6f5b7e0
193 changeset: 2:e38ba6f5b7e0
194 user: test
194 user: test
195 date: Thu Jan 01 00:00:00 1970 +0000
195 date: Thu Jan 01 00:00:00 1970 +0000
196 summary: 0.2
196 summary: 0.2
197
197
198 changeset: 1:34c2bf6b0626
198 changeset: 1:34c2bf6b0626
199 user: test
199 user: test
200 date: Thu Jan 01 00:00:00 1970 +0000
200 date: Thu Jan 01 00:00:00 1970 +0000
201 summary: 0.1
201 summary: 0.1
202
202
203 changeset: 0:f9ee2f85a263
203 changeset: 0:f9ee2f85a263
204 user: test
204 user: test
205 date: Thu Jan 01 00:00:00 1970 +0000
205 date: Thu Jan 01 00:00:00 1970 +0000
206 summary: 0.0
206 summary: 0.0
207
207
208 Make sure bundlerepo doesn't leak tempfiles (issue2491)
208 Make sure bundlerepo doesn't leak tempfiles (issue2491)
209
209
210 $ ls .hg
210 $ ls .hg
211 00changelog.i
211 00changelog.i
212 cache
212 cache
213 requires
213 requires
214 store
214 store
215 wcache
215 wcache
216
216
217 Pull ../full.hg into empty (with hook)
217 Pull ../full.hg into empty (with hook)
218
218
219 $ cat >> .hg/hgrc <<EOF
219 $ cat >> .hg/hgrc <<EOF
220 > [hooks]
220 > [hooks]
221 > changegroup = sh -c "printenv.py --line changegroup"
221 > changegroup = sh -c "printenv.py --line changegroup"
222 > EOF
222 > EOF
223
223
224 doesn't work (yet ?)
224 doesn't work (yet ?)
225 NOTE: msys is mangling the URL below
225 NOTE: msys is mangling the URL below
226
226
227 hg -R bundle://../full.hg verify
227 hg -R bundle://../full.hg verify
228
228
229 $ hg pull bundle://../full.hg
229 $ hg pull bundle://../full.hg
230 pulling from bundle:../full.hg
230 pulling from bundle:../full.hg
231 requesting all changes
231 requesting all changes
232 adding changesets
232 adding changesets
233 adding manifests
233 adding manifests
234 adding file changes
234 adding file changes
235 added 9 changesets with 7 changes to 4 files (+1 heads)
235 added 9 changesets with 7 changes to 4 files (+1 heads)
236 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
236 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
237 changegroup hook: HG_HOOKNAME=changegroup
237 changegroup hook: HG_HOOKNAME=changegroup
238 HG_HOOKTYPE=changegroup
238 HG_HOOKTYPE=changegroup
239 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
239 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
240 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
240 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
241 HG_SOURCE=pull
241 HG_SOURCE=pull
242 HG_TXNID=TXN:$ID$
242 HG_TXNID=TXN:$ID$
243 HG_TXNNAME=pull
243 HG_TXNNAME=pull
244 bundle:../full.hg (no-msys !)
244 bundle:../full.hg (no-msys !)
245 bundle;../full.hg (msys !)
245 bundle;../full.hg (msys !)
246 HG_URL=bundle:../full.hg (no-msys !)
246 HG_URL=bundle:../full.hg (no-msys !)
247 HG_URL=bundle;../full.hg (msys !)
247 HG_URL=bundle;../full.hg (msys !)
248
248
249 (run 'hg heads' to see heads, 'hg merge' to merge)
249 (run 'hg heads' to see heads, 'hg merge' to merge)
250
250
251 Rollback empty
251 Rollback empty
252
252
253 $ hg rollback
253 $ hg rollback
254 repository tip rolled back to revision -1 (undo pull)
254 repository tip rolled back to revision -1 (undo pull)
255 $ cd ..
255 $ cd ..
256
256
257 Log -R bundle:empty+full.hg
257 Log -R bundle:empty+full.hg
258
258
259 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
259 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
260 8 7 6 5 4 3 2 1 0
260 8 7 6 5 4 3 2 1 0
261
261
262 Pull full.hg into empty again (using -R; with hook)
262 Pull full.hg into empty again (using -R; with hook)
263
263
264 $ hg -R empty pull full.hg
264 $ hg -R empty pull full.hg
265 pulling from full.hg
265 pulling from full.hg
266 requesting all changes
266 requesting all changes
267 adding changesets
267 adding changesets
268 adding manifests
268 adding manifests
269 adding file changes
269 adding file changes
270 added 9 changesets with 7 changes to 4 files (+1 heads)
270 added 9 changesets with 7 changes to 4 files (+1 heads)
271 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
271 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
272 changegroup hook: HG_HOOKNAME=changegroup
272 changegroup hook: HG_HOOKNAME=changegroup
273 HG_HOOKTYPE=changegroup
273 HG_HOOKTYPE=changegroup
274 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
274 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
275 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
275 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
276 HG_SOURCE=pull
276 HG_SOURCE=pull
277 HG_TXNID=TXN:$ID$
277 HG_TXNID=TXN:$ID$
278 HG_TXNNAME=pull
278 HG_TXNNAME=pull
279 bundle:empty+full.hg
279 bundle:empty+full.hg
280 HG_URL=bundle:empty+full.hg
280 HG_URL=bundle:empty+full.hg
281
281
282 (run 'hg heads' to see heads, 'hg merge' to merge)
282 (run 'hg heads' to see heads, 'hg merge' to merge)
283
283
284 #endif
284 #endif
285
285
286 Cannot produce streaming clone bundles with "hg bundle"
286 Cannot produce streaming clone bundles with "hg bundle"
287
287
288 $ hg -R test bundle -t packed1 packed.hg
288 $ hg -R test bundle -t packed1 packed.hg
289 abort: packed bundles cannot be produced by "hg bundle"
289 abort: packed bundles cannot be produced by "hg bundle"
290 (use 'hg debugcreatestreamclonebundle')
290 (use 'hg debugcreatestreamclonebundle')
291 [10]
291 [10]
292
292
293 packed1 is produced properly
293 packed1 is produced properly
294
294
295
295
296 #if reporevlogstore rust
296 #if reporevlogstore rust
297
297
298 $ hg -R test debugcreatestreamclonebundle packed.hg
298 $ hg -R test debugcreatestreamclonebundle packed.hg
299 writing 2665 bytes for 6 files
299 writing 2665 bytes for 6 files
300 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
300 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
301
301
302 $ f -B 64 --size --sha1 --hexdump packed.hg
302 $ f -B 64 --size --sha1 --hexdump packed.hg
303 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
303 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
304 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
304 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
305 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
305 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
306 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
306 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
307 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
307 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
308 $ hg debugbundle --spec packed.hg
308 $ hg debugbundle --spec packed.hg
309 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
309 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
310 #endif
310 #endif
311
311
312 #if reporevlogstore no-rust zstd
312 #if reporevlogstore no-rust zstd
313
313
314 $ hg -R test debugcreatestreamclonebundle packed.hg
314 $ hg -R test debugcreatestreamclonebundle packed.hg
315 writing 2665 bytes for 6 files
315 writing 2665 bytes for 6 files
316 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
316 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
317
317
318 $ f -B 64 --size --sha1 --hexdump packed.hg
318 $ f -B 64 --size --sha1 --hexdump packed.hg
319 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
319 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
320 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
320 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
321 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
321 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
322 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
322 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
323 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
323 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
324 $ hg debugbundle --spec packed.hg
324 $ hg debugbundle --spec packed.hg
325 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
325 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
326 #endif
326 #endif
327
327
328 #if reporevlogstore no-rust no-zstd
328 #if reporevlogstore no-rust no-zstd
329
329
330 $ hg -R test debugcreatestreamclonebundle packed.hg
330 $ hg -R test debugcreatestreamclonebundle packed.hg
331 writing 2664 bytes for 6 files
331 writing 2664 bytes for 6 files
332 bundle requirements: generaldelta, revlogv1, sparserevlog
332 bundle requirements: generaldelta, revlogv1, sparserevlog
333
333
334 $ f -B 64 --size --sha1 --hexdump packed.hg
334 $ f -B 64 --size --sha1 --hexdump packed.hg
335 packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5
335 packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5
336 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
336 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
337 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
337 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
338 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
338 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
339 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
339 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
340 $ hg debugbundle --spec packed.hg
340 $ hg debugbundle --spec packed.hg
341 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
341 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
342 #endif
342 #endif
343
343
344 #if reporevlogstore
344 #if reporevlogstore
345
345
346 generaldelta requirement is not listed in stream clone bundles unless used
346 generaldelta requirement is not listed in stream clone bundles unless used
347
347
348 $ hg --config format.usegeneraldelta=false init testnongd
348 $ hg --config format.usegeneraldelta=false init testnongd
349 $ cd testnongd
349 $ cd testnongd
350 $ touch foo
350 $ touch foo
351 $ hg -q commit -A -m initial
351 $ hg -q commit -A -m initial
352 $ cd ..
352 $ cd ..
353
353
354 #endif
354 #endif
355
355
356 #if reporevlogstore rust
356 #if reporevlogstore rust
357
357
358 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
358 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
359 writing 301 bytes for 3 files
359 writing 301 bytes for 3 files
360 bundle requirements: revlog-compression-zstd, revlogv1
360 bundle requirements: revlog-compression-zstd, revlogv1
361
361
362 $ f -B 64 --size --sha1 --hexdump packednongd.hg
362 $ f -B 64 --size --sha1 --hexdump packednongd.hg
363 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
363 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
364 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
364 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
365 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
365 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
366 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
366 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
367 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
367 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
368
368
369 $ hg debugbundle --spec packednongd.hg
369 $ hg debugbundle --spec packednongd.hg
370 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
370 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
371
371
372 #endif
372 #endif
373
373
374 #if reporevlogstore no-rust zstd
374 #if reporevlogstore no-rust zstd
375
375
376 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
376 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
377 writing 301 bytes for 3 files
377 writing 301 bytes for 3 files
378 bundle requirements: revlog-compression-zstd, revlogv1
378 bundle requirements: revlog-compression-zstd, revlogv1
379
379
380 $ f -B 64 --size --sha1 --hexdump packednongd.hg
380 $ f -B 64 --size --sha1 --hexdump packednongd.hg
381 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
381 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
382 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
382 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
383 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
383 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
384 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
384 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
385 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
385 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
386
386
387 $ hg debugbundle --spec packednongd.hg
387 $ hg debugbundle --spec packednongd.hg
388 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
388 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
389
389
390
390
391 #endif
391 #endif
392
392
393 #if reporevlogstore no-rust no-zstd
393 #if reporevlogstore no-rust no-zstd
394
394
395 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
395 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
396 writing 301 bytes for 3 files
396 writing 301 bytes for 3 files
397 bundle requirements: revlogv1
397 bundle requirements: revlogv1
398
398
399 $ f -B 64 --size --sha1 --hexdump packednongd.hg
399 $ f -B 64 --size --sha1 --hexdump packednongd.hg
400 packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
400 packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
401 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
401 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
402 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
402 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
403 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
403 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
404 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
404 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
405
405
406 $ hg debugbundle --spec packednongd.hg
406 $ hg debugbundle --spec packednongd.hg
407 none-packed1;requirements%3Drevlogv1
407 none-packed1;requirements%3Drevlogv1
408
408
409
409
410 #endif
410 #endif
411
411
412 #if reporevlogstore
412 #if reporevlogstore
413
413
414 Warning emitted when packed bundles contain secret changesets
414 Warning emitted when packed bundles contain secret changesets
415
415
416 $ hg init testsecret
416 $ hg init testsecret
417 $ cd testsecret
417 $ cd testsecret
418 $ touch foo
418 $ touch foo
419 $ hg -q commit -A -m initial
419 $ hg -q commit -A -m initial
420 $ hg phase --force --secret -r .
420 $ hg phase --force --secret -r .
421 $ cd ..
421 $ cd ..
422
422
423 #endif
423 #endif
424
424
425 #if reporevlogstore rust
425 #if reporevlogstore rust
426
426
427 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
427 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
428 (warning: stream clone bundle will contain secret revisions)
428 (warning: stream clone bundle will contain secret revisions)
429 writing 301 bytes for 3 files
429 writing 301 bytes for 3 files
430 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
430 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
431
431
432 #endif
432 #endif
433
433
434 #if reporevlogstore no-rust zstd
434 #if reporevlogstore no-rust zstd
435
435
436 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
436 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
437 (warning: stream clone bundle will contain secret revisions)
437 (warning: stream clone bundle will contain secret revisions)
438 writing 301 bytes for 3 files
438 writing 301 bytes for 3 files
439 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
439 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
440
440
441 #endif
441 #endif
442
442
443 #if reporevlogstore no-rust no-zstd
443 #if reporevlogstore no-rust no-zstd
444
444
445 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
445 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
446 (warning: stream clone bundle will contain secret revisions)
446 (warning: stream clone bundle will contain secret revisions)
447 writing 301 bytes for 3 files
447 writing 301 bytes for 3 files
448 bundle requirements: generaldelta, revlogv1, sparserevlog
448 bundle requirements: generaldelta, revlogv1, sparserevlog
449
449
450 #endif
450 #endif
451
451
452 #if reporevlogstore
452 #if reporevlogstore
453
453
454 Unpacking packed1 bundles with "hg unbundle" isn't allowed
454 Unpacking packed1 bundles with "hg unbundle" isn't allowed
455
455
456 $ hg init packed
456 $ hg init packed
457 $ hg -R packed unbundle packed.hg
457 $ hg -R packed unbundle packed.hg
458 abort: packed bundles cannot be applied with "hg unbundle"
458 abort: packed bundles cannot be applied with "hg unbundle"
459 (use "hg debugapplystreamclonebundle")
459 (use "hg debugapplystreamclonebundle")
460 [10]
460 [10]
461
461
462 packed1 can be consumed from debug command
462 packed1 can be consumed from debug command
463
463
464 (this also confirms that streamclone-ed changes are visible via
464 (this also confirms that streamclone-ed changes are visible via
465 @filecache properties to in-process procedures before closing
465 @filecache properties to in-process procedures before closing
466 transaction)
466 transaction)
467
467
468 $ cat > $TESTTMP/showtip.py <<EOF
468 $ cat > $TESTTMP/showtip.py <<EOF
469 >
469 >
470 > def showtip(ui, repo, hooktype, **kwargs):
470 > def showtip(ui, repo, hooktype, **kwargs):
471 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
471 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
472 >
472 >
473 > def reposetup(ui, repo):
473 > def reposetup(ui, repo):
474 > # this confirms (and ensures) that (empty) 00changelog.i
474 > # this confirms (and ensures) that (empty) 00changelog.i
475 > # before streamclone is already cached as repo.changelog
475 > # before streamclone is already cached as repo.changelog
476 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
476 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
477 >
477 >
478 > # this confirms that streamclone-ed changes are visible to
478 > # this confirms that streamclone-ed changes are visible to
479 > # in-process procedures before closing transaction
479 > # in-process procedures before closing transaction
480 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
480 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
481 >
481 >
482 > # this confirms that streamclone-ed changes are still visible
482 > # this confirms that streamclone-ed changes are still visible
483 > # after closing transaction
483 > # after closing transaction
484 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
484 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
485 > EOF
485 > EOF
486 $ cat >> $HGRCPATH <<EOF
486 $ cat >> $HGRCPATH <<EOF
487 > [extensions]
487 > [extensions]
488 > showtip = $TESTTMP/showtip.py
488 > showtip = $TESTTMP/showtip.py
489 > EOF
489 > EOF
490
490
491 $ hg -R packed debugapplystreamclonebundle packed.hg
491 $ hg -R packed debugapplystreamclonebundle packed.hg
492 6 files to transfer, 2.60 KB of data
492 6 files to transfer, 2.60 KB of data
493 pretxnopen: 000000000000
493 pretxnopen: 000000000000
494 pretxnclose: aa35859c02ea
494 pretxnclose: aa35859c02ea
495 transferred 2.60 KB in * seconds (* */sec) (glob)
495 transferred 2.60 KB in * seconds (* */sec) (glob)
496 txnclose: aa35859c02ea
496 txnclose: aa35859c02ea
497
497
498 (for safety, confirm visibility of streamclone-ed changes by another
498 (for safety, confirm visibility of streamclone-ed changes by another
499 process, too)
499 process, too)
500
500
501 $ hg -R packed tip -T "{node|short}\n"
501 $ hg -R packed tip -T "{node|short}\n"
502 aa35859c02ea
502 aa35859c02ea
503
503
504 $ cat >> $HGRCPATH <<EOF
504 $ cat >> $HGRCPATH <<EOF
505 > [extensions]
505 > [extensions]
506 > showtip = !
506 > showtip = !
507 > EOF
507 > EOF
508
508
509 Does not work on non-empty repo
509 Does not work on non-empty repo
510
510
511 $ hg -R packed debugapplystreamclonebundle packed.hg
511 $ hg -R packed debugapplystreamclonebundle packed.hg
512 abort: cannot apply stream clone bundle on non-empty repo
512 abort: cannot apply stream clone bundle on non-empty repo
513 [255]
513 [255]
514
514
515 #endif
515 #endif
516
516
517 Create partial clones
517 Create partial clones
518
518
519 $ rm -r empty
519 $ rm -r empty
520 $ hg init empty
520 $ hg init empty
521 $ hg clone -r 3 test partial
521 $ hg clone -r 3 test partial
522 adding changesets
522 adding changesets
523 adding manifests
523 adding manifests
524 adding file changes
524 adding file changes
525 added 4 changesets with 4 changes to 1 files
525 added 4 changesets with 4 changes to 1 files
526 new changesets f9ee2f85a263:eebf5a27f8ca
526 new changesets f9ee2f85a263:eebf5a27f8ca
527 updating to branch default
527 updating to branch default
528 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
528 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
529 $ hg clone partial partial2
529 $ hg clone partial partial2
530 updating to branch default
530 updating to branch default
531 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
531 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
532 $ cd partial
532 $ cd partial
533
533
534 #if repobundlerepo
534 #if repobundlerepo
535
535
536 Log -R full.hg in partial
536 Log -R full.hg in partial
537
537
538 $ hg -R bundle://../full.hg log -T phases
538 $ hg -R bundle://../full.hg log -T phases
539 changeset: 8:aa35859c02ea
539 changeset: 8:aa35859c02ea
540 tag: tip
540 tag: tip
541 phase: draft
541 phase: draft
542 parent: 3:eebf5a27f8ca
542 parent: 3:eebf5a27f8ca
543 user: test
543 user: test
544 date: Thu Jan 01 00:00:00 1970 +0000
544 date: Thu Jan 01 00:00:00 1970 +0000
545 summary: 0.3m
545 summary: 0.3m
546
546
547 changeset: 7:a6a34bfa0076
547 changeset: 7:a6a34bfa0076
548 phase: draft
548 phase: draft
549 user: test
549 user: test
550 date: Thu Jan 01 00:00:00 1970 +0000
550 date: Thu Jan 01 00:00:00 1970 +0000
551 summary: 1.3m
551 summary: 1.3m
552
552
553 changeset: 6:7373c1169842
553 changeset: 6:7373c1169842
554 phase: draft
554 phase: draft
555 user: test
555 user: test
556 date: Thu Jan 01 00:00:00 1970 +0000
556 date: Thu Jan 01 00:00:00 1970 +0000
557 summary: 1.3
557 summary: 1.3
558
558
559 changeset: 5:1bb50a9436a7
559 changeset: 5:1bb50a9436a7
560 phase: draft
560 phase: draft
561 user: test
561 user: test
562 date: Thu Jan 01 00:00:00 1970 +0000
562 date: Thu Jan 01 00:00:00 1970 +0000
563 summary: 1.2
563 summary: 1.2
564
564
565 changeset: 4:095197eb4973
565 changeset: 4:095197eb4973
566 phase: draft
566 phase: draft
567 parent: 0:f9ee2f85a263
567 parent: 0:f9ee2f85a263
568 user: test
568 user: test
569 date: Thu Jan 01 00:00:00 1970 +0000
569 date: Thu Jan 01 00:00:00 1970 +0000
570 summary: 1.1
570 summary: 1.1
571
571
572 changeset: 3:eebf5a27f8ca
572 changeset: 3:eebf5a27f8ca
573 phase: public
573 phase: public
574 user: test
574 user: test
575 date: Thu Jan 01 00:00:00 1970 +0000
575 date: Thu Jan 01 00:00:00 1970 +0000
576 summary: 0.3
576 summary: 0.3
577
577
578 changeset: 2:e38ba6f5b7e0
578 changeset: 2:e38ba6f5b7e0
579 phase: public
579 phase: public
580 user: test
580 user: test
581 date: Thu Jan 01 00:00:00 1970 +0000
581 date: Thu Jan 01 00:00:00 1970 +0000
582 summary: 0.2
582 summary: 0.2
583
583
584 changeset: 1:34c2bf6b0626
584 changeset: 1:34c2bf6b0626
585 phase: public
585 phase: public
586 user: test
586 user: test
587 date: Thu Jan 01 00:00:00 1970 +0000
587 date: Thu Jan 01 00:00:00 1970 +0000
588 summary: 0.1
588 summary: 0.1
589
589
590 changeset: 0:f9ee2f85a263
590 changeset: 0:f9ee2f85a263
591 phase: public
591 phase: public
592 user: test
592 user: test
593 date: Thu Jan 01 00:00:00 1970 +0000
593 date: Thu Jan 01 00:00:00 1970 +0000
594 summary: 0.0
594 summary: 0.0
595
595
596
596
597 Incoming full.hg in partial
597 Incoming full.hg in partial
598
598
599 $ hg incoming bundle://../full.hg
599 $ hg incoming bundle://../full.hg
600 comparing with bundle:../full.hg
600 comparing with bundle:../full.hg
601 searching for changes
601 searching for changes
602 changeset: 4:095197eb4973
602 changeset: 4:095197eb4973
603 parent: 0:f9ee2f85a263
603 parent: 0:f9ee2f85a263
604 user: test
604 user: test
605 date: Thu Jan 01 00:00:00 1970 +0000
605 date: Thu Jan 01 00:00:00 1970 +0000
606 summary: 1.1
606 summary: 1.1
607
607
608 changeset: 5:1bb50a9436a7
608 changeset: 5:1bb50a9436a7
609 user: test
609 user: test
610 date: Thu Jan 01 00:00:00 1970 +0000
610 date: Thu Jan 01 00:00:00 1970 +0000
611 summary: 1.2
611 summary: 1.2
612
612
613 changeset: 6:7373c1169842
613 changeset: 6:7373c1169842
614 user: test
614 user: test
615 date: Thu Jan 01 00:00:00 1970 +0000
615 date: Thu Jan 01 00:00:00 1970 +0000
616 summary: 1.3
616 summary: 1.3
617
617
618 changeset: 7:a6a34bfa0076
618 changeset: 7:a6a34bfa0076
619 user: test
619 user: test
620 date: Thu Jan 01 00:00:00 1970 +0000
620 date: Thu Jan 01 00:00:00 1970 +0000
621 summary: 1.3m
621 summary: 1.3m
622
622
623 changeset: 8:aa35859c02ea
623 changeset: 8:aa35859c02ea
624 tag: tip
624 tag: tip
625 parent: 3:eebf5a27f8ca
625 parent: 3:eebf5a27f8ca
626 user: test
626 user: test
627 date: Thu Jan 01 00:00:00 1970 +0000
627 date: Thu Jan 01 00:00:00 1970 +0000
628 summary: 0.3m
628 summary: 0.3m
629
629
630
630
631 Outgoing -R full.hg vs partial2 in partial
631 Outgoing -R full.hg vs partial2 in partial
632
632
633 $ hg -R bundle://../full.hg outgoing ../partial2
633 $ hg -R bundle://../full.hg outgoing ../partial2
634 comparing with ../partial2
634 comparing with ../partial2
635 searching for changes
635 searching for changes
636 changeset: 4:095197eb4973
636 changeset: 4:095197eb4973
637 parent: 0:f9ee2f85a263
637 parent: 0:f9ee2f85a263
638 user: test
638 user: test
639 date: Thu Jan 01 00:00:00 1970 +0000
639 date: Thu Jan 01 00:00:00 1970 +0000
640 summary: 1.1
640 summary: 1.1
641
641
642 changeset: 5:1bb50a9436a7
642 changeset: 5:1bb50a9436a7
643 user: test
643 user: test
644 date: Thu Jan 01 00:00:00 1970 +0000
644 date: Thu Jan 01 00:00:00 1970 +0000
645 summary: 1.2
645 summary: 1.2
646
646
647 changeset: 6:7373c1169842
647 changeset: 6:7373c1169842
648 user: test
648 user: test
649 date: Thu Jan 01 00:00:00 1970 +0000
649 date: Thu Jan 01 00:00:00 1970 +0000
650 summary: 1.3
650 summary: 1.3
651
651
652 changeset: 7:a6a34bfa0076
652 changeset: 7:a6a34bfa0076
653 user: test
653 user: test
654 date: Thu Jan 01 00:00:00 1970 +0000
654 date: Thu Jan 01 00:00:00 1970 +0000
655 summary: 1.3m
655 summary: 1.3m
656
656
657 changeset: 8:aa35859c02ea
657 changeset: 8:aa35859c02ea
658 tag: tip
658 tag: tip
659 parent: 3:eebf5a27f8ca
659 parent: 3:eebf5a27f8ca
660 user: test
660 user: test
661 date: Thu Jan 01 00:00:00 1970 +0000
661 date: Thu Jan 01 00:00:00 1970 +0000
662 summary: 0.3m
662 summary: 0.3m
663
663
664
664
665 Outgoing -R does-not-exist.hg vs partial2 in partial
665 Outgoing -R does-not-exist.hg vs partial2 in partial
666
666
667 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
667 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
668 abort: *../does-not-exist.hg* (glob)
668 abort: *../does-not-exist.hg* (glob)
669 [255]
669 [255]
670
670
671 #endif
671 #endif
672
672
673 $ cd ..
673 $ cd ..
674
674
675 hide outer repo
675 hide outer repo
676 $ hg init
676 $ hg init
677
677
678 Direct clone from bundle (all-history)
678 Direct clone from bundle (all-history)
679
679
680 #if repobundlerepo
680 #if repobundlerepo
681
681
682 $ hg clone full.hg full-clone
682 $ hg clone full.hg full-clone
683 requesting all changes
683 requesting all changes
684 adding changesets
684 adding changesets
685 adding manifests
685 adding manifests
686 adding file changes
686 adding file changes
687 added 9 changesets with 7 changes to 4 files (+1 heads)
687 added 9 changesets with 7 changes to 4 files (+1 heads)
688 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
688 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
689 updating to branch default
689 updating to branch default
690 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
690 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
691 $ hg -R full-clone heads
691 $ hg -R full-clone heads
692 changeset: 8:aa35859c02ea
692 changeset: 8:aa35859c02ea
693 tag: tip
693 tag: tip
694 parent: 3:eebf5a27f8ca
694 parent: 3:eebf5a27f8ca
695 user: test
695 user: test
696 date: Thu Jan 01 00:00:00 1970 +0000
696 date: Thu Jan 01 00:00:00 1970 +0000
697 summary: 0.3m
697 summary: 0.3m
698
698
699 changeset: 7:a6a34bfa0076
699 changeset: 7:a6a34bfa0076
700 user: test
700 user: test
701 date: Thu Jan 01 00:00:00 1970 +0000
701 date: Thu Jan 01 00:00:00 1970 +0000
702 summary: 1.3m
702 summary: 1.3m
703
703
704 $ rm -r full-clone
704 $ rm -r full-clone
705
705
706 When cloning from a non-copiable repository into '', do not
706 When cloning from a non-copiable repository into '', do not
707 recurse infinitely (issue2528)
707 recurse infinitely (issue2528)
708
708
709 $ hg clone full.hg ''
709 $ hg clone full.hg ''
710 abort: empty destination path is not valid
710 abort: empty destination path is not valid
711 [10]
711 [10]
712
712
713 test for https://bz.mercurial-scm.org/216
713 test for https://bz.mercurial-scm.org/216
714
714
715 Unbundle incremental bundles into fresh empty in one go
715 Unbundle incremental bundles into fresh empty in one go
716
716
717 $ rm -r empty
717 $ rm -r empty
718 $ hg init empty
718 $ hg init empty
719 $ hg -R test bundle --base null -r 0 ../0.hg
719 $ hg -R test bundle --base null -r 0 ../0.hg
720 1 changesets found
720 1 changesets found
721 $ hg -R test bundle --exact -r 1 ../1.hg
721 $ hg -R test bundle --exact -r 1 ../1.hg
722 1 changesets found
722 1 changesets found
723 $ hg -R empty unbundle -u ../0.hg ../1.hg
723 $ hg -R empty unbundle -u ../0.hg ../1.hg
724 adding changesets
724 adding changesets
725 adding manifests
725 adding manifests
726 adding file changes
726 adding file changes
727 added 1 changesets with 1 changes to 1 files
727 added 1 changesets with 1 changes to 1 files
728 new changesets f9ee2f85a263 (1 drafts)
728 new changesets f9ee2f85a263 (1 drafts)
729 adding changesets
729 adding changesets
730 adding manifests
730 adding manifests
731 adding file changes
731 adding file changes
732 added 1 changesets with 1 changes to 1 files
732 added 1 changesets with 1 changes to 1 files
733 new changesets 34c2bf6b0626 (1 drafts)
733 new changesets 34c2bf6b0626 (1 drafts)
734 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
734 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
735
735
736 View full contents of the bundle
736 View full contents of the bundle
737 $ hg -R test bundle --base null -r 3 ../partial.hg
737 $ hg -R test bundle --base null -r 3 ../partial.hg
738 4 changesets found
738 4 changesets found
739 $ cd test
739 $ cd test
740 $ hg -R ../../partial.hg log -r "bundle()"
740 $ hg -R ../../partial.hg log -r "bundle()"
741 changeset: 0:f9ee2f85a263
741 changeset: 0:f9ee2f85a263
742 user: test
742 user: test
743 date: Thu Jan 01 00:00:00 1970 +0000
743 date: Thu Jan 01 00:00:00 1970 +0000
744 summary: 0.0
744 summary: 0.0
745
745
746 changeset: 1:34c2bf6b0626
746 changeset: 1:34c2bf6b0626
747 user: test
747 user: test
748 date: Thu Jan 01 00:00:00 1970 +0000
748 date: Thu Jan 01 00:00:00 1970 +0000
749 summary: 0.1
749 summary: 0.1
750
750
751 changeset: 2:e38ba6f5b7e0
751 changeset: 2:e38ba6f5b7e0
752 user: test
752 user: test
753 date: Thu Jan 01 00:00:00 1970 +0000
753 date: Thu Jan 01 00:00:00 1970 +0000
754 summary: 0.2
754 summary: 0.2
755
755
756 changeset: 3:eebf5a27f8ca
756 changeset: 3:eebf5a27f8ca
757 user: test
757 user: test
758 date: Thu Jan 01 00:00:00 1970 +0000
758 date: Thu Jan 01 00:00:00 1970 +0000
759 summary: 0.3
759 summary: 0.3
760
760
761 $ cd ..
761 $ cd ..
762
762
763 #endif
763 #endif
764
764
765 test for 540d1059c802
765 test for 540d1059c802
766
766
767 $ hg init orig
767 $ hg init orig
768 $ cd orig
768 $ cd orig
769 $ echo foo > foo
769 $ echo foo > foo
770 $ hg add foo
770 $ hg add foo
771 $ hg ci -m 'add foo'
771 $ hg ci -m 'add foo'
772
772
773 $ hg clone . ../copy
773 $ hg clone . ../copy
774 updating to branch default
774 updating to branch default
775 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
775 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
776 $ hg tag foo
776 $ hg tag foo
777
777
778 $ cd ../copy
778 $ cd ../copy
779 $ echo >> foo
779 $ echo >> foo
780 $ hg ci -m 'change foo'
780 $ hg ci -m 'change foo'
781 $ hg bundle ../bundle.hg ../orig
781 $ hg bundle ../bundle.hg ../orig
782 searching for changes
782 searching for changes
783 1 changesets found
783 1 changesets found
784
784
785 $ cd ..
785 $ cd ..
786
786
787 #if repobundlerepo
787 #if repobundlerepo
788 $ cd orig
788 $ cd orig
789 $ hg incoming ../bundle.hg
789 $ hg incoming ../bundle.hg
790 comparing with ../bundle.hg
790 comparing with ../bundle.hg
791 searching for changes
791 searching for changes
792 changeset: 2:ed1b79f46b9a
792 changeset: 2:ed1b79f46b9a
793 tag: tip
793 tag: tip
794 parent: 0:bbd179dfa0a7
794 parent: 0:bbd179dfa0a7
795 user: test
795 user: test
796 date: Thu Jan 01 00:00:00 1970 +0000
796 date: Thu Jan 01 00:00:00 1970 +0000
797 summary: change foo
797 summary: change foo
798
798
799 $ cd ..
799 $ cd ..
800
800
801 test bundle with # in the filename (issue2154):
801 test bundle with # in the filename (issue2154):
802
802
803 $ cp bundle.hg 'test#bundle.hg'
803 $ cp bundle.hg 'test#bundle.hg'
804 $ cd orig
804 $ cd orig
805 $ hg incoming '../test#bundle.hg'
805 $ hg incoming '../test#bundle.hg'
806 comparing with ../test
806 comparing with ../test
807 abort: unknown revision 'bundle.hg'
807 abort: unknown revision 'bundle.hg'
808 [10]
808 [10]
809
809
810 note that percent encoding is not handled:
810 note that percent encoding is not handled:
811
811
812 $ hg incoming ../test%23bundle.hg
812 $ hg incoming ../test%23bundle.hg
813 abort: repository ../test%23bundle.hg not found
813 abort: repository ../test%23bundle.hg not found
814 [255]
814 [255]
815 $ cd ..
815 $ cd ..
816
816
817 #endif
817 #endif
818
818
819 test to bundle revisions on the newly created branch (issue3828):
819 test to bundle revisions on the newly created branch (issue3828):
820
820
821 $ hg -q clone -U test test-clone
821 $ hg -q clone -U test test-clone
822 $ cd test
822 $ cd test
823
823
824 $ hg -q branch foo
824 $ hg -q branch foo
825 $ hg commit -m "create foo branch"
825 $ hg commit -m "create foo branch"
826 $ hg -q outgoing ../test-clone
826 $ hg -q outgoing ../test-clone
827 9:b4f5acb1ee27
827 9:b4f5acb1ee27
828 $ hg -q bundle --branch foo foo.hg ../test-clone
828 $ hg -q bundle --branch foo foo.hg ../test-clone
829 #if repobundlerepo
829 #if repobundlerepo
830 $ hg -R foo.hg -q log -r "bundle()"
830 $ hg -R foo.hg -q log -r "bundle()"
831 9:b4f5acb1ee27
831 9:b4f5acb1ee27
832 #endif
832 #endif
833
833
834 $ cd ..
834 $ cd ..
835
835
836 test for https://bz.mercurial-scm.org/1144
836 test for https://bz.mercurial-scm.org/1144
837
837
838 test that verify bundle does not traceback
838 test that verify bundle does not traceback
839
839
840 partial history bundle, fails w/ unknown parent
840 partial history bundle, fails w/ unknown parent
841
841
842 $ hg -R bundle.hg verify
842 $ hg -R bundle.hg verify
843 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
843 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
844 [50]
844 [50]
845
845
846 full history bundle, refuses to verify non-local repo
846 full history bundle, refuses to verify non-local repo
847
847
848 #if repobundlerepo
848 #if repobundlerepo
849 $ hg -R all.hg verify
849 $ hg -R all.hg verify
850 abort: cannot verify bundle or remote repos
850 abort: cannot verify bundle or remote repos
851 [255]
851 [255]
852 #endif
852 #endif
853
853
854 but, regular verify must continue to work
854 but, regular verify must continue to work
855
855
856 $ hg -R orig verify
856 $ hg -R orig verify
857 checking changesets
857 checking changesets
858 checking manifests
858 checking manifests
859 crosschecking files in changesets and manifests
859 crosschecking files in changesets and manifests
860 checking files
860 checking files
861 checked 2 changesets with 2 changes to 2 files
861 checked 2 changesets with 2 changes to 2 files
862
862
863 #if repobundlerepo
863 #if repobundlerepo
864 diff against bundle
864 diff against bundle
865
865
866 $ hg init b
866 $ hg init b
867 $ cd b
867 $ cd b
868 $ hg -R ../all.hg diff -r tip
868 $ hg -R ../all.hg diff -r tip
869 diff -r aa35859c02ea anotherfile
869 diff -r aa35859c02ea anotherfile
870 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
870 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
871 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
871 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
872 @@ -1,4 +0,0 @@
872 @@ -1,4 +0,0 @@
873 -0
873 -0
874 -1
874 -1
875 -2
875 -2
876 -3
876 -3
877 $ cd ..
877 $ cd ..
878 #endif
878 #endif
879
879
880 bundle single branch
880 bundle single branch
881
881
882 $ hg init branchy
882 $ hg init branchy
883 $ cd branchy
883 $ cd branchy
884 $ echo a >a
884 $ echo a >a
885 $ echo x >x
885 $ echo x >x
886 $ hg ci -Ama
886 $ hg ci -Ama
887 adding a
887 adding a
888 adding x
888 adding x
889 $ echo c >c
889 $ echo c >c
890 $ echo xx >x
890 $ echo xx >x
891 $ hg ci -Amc
891 $ hg ci -Amc
892 adding c
892 adding c
893 $ echo c1 >c1
893 $ echo c1 >c1
894 $ hg ci -Amc1
894 $ hg ci -Amc1
895 adding c1
895 adding c1
896 $ hg up 0
896 $ hg up 0
897 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
897 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
898 $ echo b >b
898 $ echo b >b
899 $ hg ci -Amb
899 $ hg ci -Amb
900 adding b
900 adding b
901 created new head
901 created new head
902 $ echo b1 >b1
902 $ echo b1 >b1
903 $ echo xx >x
903 $ echo xx >x
904 $ hg ci -Amb1
904 $ hg ci -Amb1
905 adding b1
905 adding b1
906 $ hg clone -q -r2 . part
906 $ hg clone -q -r2 . part
907
907
908 == bundling via incoming
908 == bundling via incoming
909
909
910 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
910 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
911 comparing with .
911 comparing with .
912 searching for changes
912 searching for changes
913 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
913 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
914 057f4db07f61970e1c11e83be79e9d08adc4dc31
914 057f4db07f61970e1c11e83be79e9d08adc4dc31
915
915
916 == bundling
916 == bundling
917
917
918 $ hg bundle bundle.hg part --debug --config progress.debug=true
918 $ hg bundle bundle.hg part --debug --config progress.debug=true
919 query 1; heads
919 query 1; heads
920 searching for changes
920 searching for changes
921 all remote heads known locally
921 all remote heads known locally
922 2 changesets found
922 2 changesets found
923 list of changesets:
923 list of changesets:
924 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
924 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
925 057f4db07f61970e1c11e83be79e9d08adc4dc31
925 057f4db07f61970e1c11e83be79e9d08adc4dc31
926 bundle2-output-bundle: "HG20", (1 params) 2 parts total
926 bundle2-output-bundle: "HG20", (1 params) 2 parts total
927 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
927 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
928 changesets: 1/2 chunks (50.00%)
928 changesets: 1/2 chunks (50.00%)
929 changesets: 2/2 chunks (100.00%)
929 changesets: 2/2 chunks (100.00%)
930 manifests: 1/2 chunks (50.00%)
930 manifests: 1/2 chunks (50.00%)
931 manifests: 2/2 chunks (100.00%)
931 manifests: 2/2 chunks (100.00%)
932 files: b 1/3 files (33.33%)
932 files: b 1/3 files (33.33%)
933 files: b1 2/3 files (66.67%)
933 files: b1 2/3 files (66.67%)
934 files: x 3/3 files (100.00%)
934 files: x 3/3 files (100.00%)
935 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
935 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
936
936
937 #if repobundlerepo
937 #if repobundlerepo
938 == Test for issue3441
938 == Test for issue3441
939
939
940 $ hg clone -q -r0 . part2
940 $ hg clone -q -r0 . part2
941 $ hg -q -R part2 pull bundle.hg
941 $ hg -q -R part2 pull bundle.hg
942 $ hg -R part2 verify
942 $ hg -R part2 verify
943 checking changesets
943 checking changesets
944 checking manifests
944 checking manifests
945 crosschecking files in changesets and manifests
945 crosschecking files in changesets and manifests
946 checking files
946 checking files
947 checked 3 changesets with 5 changes to 4 files
947 checked 3 changesets with 5 changes to 4 files
948 #endif
948 #endif
949
949
950 == Test bundling no commits
950 == Test bundling no commits
951
951
952 $ hg bundle -r 'public()' no-output.hg
952 $ hg bundle -r 'public()' no-output.hg
953 abort: no commits to bundle
953 abort: no commits to bundle
954 [10]
954 [10]
955
955
956 $ cd ..
956 $ cd ..
957
957
958 When user merges to the revision existing only in the bundle,
958 When user merges to the revision existing only in the bundle,
959 it should show warning that second parent of the working
959 it should show warning that second parent of the working
960 directory does not exist
960 directory does not exist
961
961
962 $ hg init update2bundled
962 $ hg init update2bundled
963 $ cd update2bundled
963 $ cd update2bundled
964 $ cat <<EOF >> .hg/hgrc
964 $ cat <<EOF >> .hg/hgrc
965 > [extensions]
965 > [extensions]
966 > strip =
966 > strip =
967 > EOF
967 > EOF
968 $ echo "aaa" >> a
968 $ echo "aaa" >> a
969 $ hg commit -A -m 0
969 $ hg commit -A -m 0
970 adding a
970 adding a
971 $ echo "bbb" >> b
971 $ echo "bbb" >> b
972 $ hg commit -A -m 1
972 $ hg commit -A -m 1
973 adding b
973 adding b
974 $ echo "ccc" >> c
974 $ echo "ccc" >> c
975 $ hg commit -A -m 2
975 $ hg commit -A -m 2
976 adding c
976 adding c
977 $ hg update -r 1
977 $ hg update -r 1
978 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
978 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
979 $ echo "ddd" >> d
979 $ echo "ddd" >> d
980 $ hg commit -A -m 3
980 $ hg commit -A -m 3
981 adding d
981 adding d
982 created new head
982 created new head
983 $ hg update -r 2
983 $ hg update -r 2
984 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
984 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
985 $ hg log -G
985 $ hg log -G
986 o changeset: 3:8bd3e1f196af
986 o changeset: 3:8bd3e1f196af
987 | tag: tip
987 | tag: tip
988 | parent: 1:a01eca7af26d
988 | parent: 1:a01eca7af26d
989 | user: test
989 | user: test
990 | date: Thu Jan 01 00:00:00 1970 +0000
990 | date: Thu Jan 01 00:00:00 1970 +0000
991 | summary: 3
991 | summary: 3
992 |
992 |
993 | @ changeset: 2:4652c276ac4f
993 | @ changeset: 2:4652c276ac4f
994 |/ user: test
994 |/ user: test
995 | date: Thu Jan 01 00:00:00 1970 +0000
995 | date: Thu Jan 01 00:00:00 1970 +0000
996 | summary: 2
996 | summary: 2
997 |
997 |
998 o changeset: 1:a01eca7af26d
998 o changeset: 1:a01eca7af26d
999 | user: test
999 | user: test
1000 | date: Thu Jan 01 00:00:00 1970 +0000
1000 | date: Thu Jan 01 00:00:00 1970 +0000
1001 | summary: 1
1001 | summary: 1
1002 |
1002 |
1003 o changeset: 0:4fe08cd4693e
1003 o changeset: 0:4fe08cd4693e
1004 user: test
1004 user: test
1005 date: Thu Jan 01 00:00:00 1970 +0000
1005 date: Thu Jan 01 00:00:00 1970 +0000
1006 summary: 0
1006 summary: 0
1007
1007
1008
1008
1009 #if repobundlerepo
1009 #if repobundlerepo
1010 $ hg bundle --base 1 -r 3 ../update2bundled.hg
1010 $ hg bundle --base 1 -r 3 ../update2bundled.hg
1011 1 changesets found
1011 1 changesets found
1012 $ hg strip -r 3
1012 $ hg strip -r 3
1013 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
1013 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
1014 $ hg merge -R ../update2bundled.hg -r 3
1014 $ hg merge -R ../update2bundled.hg -r 3
1015 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1015 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1017 (branch merge, don't forget to commit)
1017 (branch merge, don't forget to commit)
1018
1018
1019 When user updates to the revision existing only in the bundle,
1019 When user updates to the revision existing only in the bundle,
1020 it should show warning
1020 it should show warning
1021
1021
1022 $ hg update -R ../update2bundled.hg --clean -r 3
1022 $ hg update -R ../update2bundled.hg --clean -r 3
1023 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1023 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1024 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1024 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1025
1025
1026 When user updates to the revision existing in the local repository
1026 When user updates to the revision existing in the local repository
1027 the warning shouldn't be emitted
1027 the warning shouldn't be emitted
1028
1028
1029 $ hg update -R ../update2bundled.hg -r 0
1029 $ hg update -R ../update2bundled.hg -r 0
1030 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1030 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1031 #endif
1031 #endif
1032
1032
1033 Test the option that create slim bundle
1033 Test the option that create slim bundle
1034
1034
1035 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
1035 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
1036 3 changesets found
1036 3 changesets found
1037
1037
1038 Test the option that create and no-delta's bundle
1038 Test the option that create and no-delta's bundle
1039 $ hg bundle -a --config devel.bundle.delta=full ./full.hg
1039 $ hg bundle -a --config devel.bundle.delta=full ./full.hg
1040 3 changesets found
1040 3 changesets found
1041
1041
1042 Test the debug output when applying delta
1042 Test the debug output when applying delta
1043 -----------------------------------------
1043 -----------------------------------------
1044
1044
1045 $ hg init foo
1045 $ hg init foo
1046 $ hg -R foo unbundle ./slim.hg \
1046 $ hg -R foo unbundle ./slim.hg \
1047 > --config debug.revlog.debug-delta=yes \
1047 > --config debug.revlog.debug-delta=yes \
1048 > --config storage.revlog.reuse-external-delta=no \
1048 > --config storage.revlog.reuse-external-delta=no \
1049 > --config storage.revlog.reuse-external-delta-parent=no
1049 > --config storage.revlog.reuse-external-delta-parent=no
1050 adding changesets
1050 adding changesets
1051 DBG-DELTAS: CHANGELOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1051 DBG-DELTAS: CHANGELOG: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1052 DBG-DELTAS: CHANGELOG: rev=1: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1052 DBG-DELTAS: CHANGELOG: rev=1: delta-base=1 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1053 DBG-DELTAS: CHANGELOG: rev=2: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1053 DBG-DELTAS: CHANGELOG: rev=2: delta-base=2 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1054 adding manifests
1054 adding manifests
1055 DBG-DELTAS: MANIFESTLOG: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1055 DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1056 DBG-DELTAS: MANIFESTLOG: rev=1: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1056 DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1057 DBG-DELTAS: MANIFESTLOG: rev=2: search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
1057 DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
1058 adding file changes
1058 adding file changes
1059 DBG-DELTAS: FILELOG:a: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1059 DBG-DELTAS: FILELOG:a: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1060 DBG-DELTAS: FILELOG:b: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1060 DBG-DELTAS: FILELOG:b: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1061 DBG-DELTAS: FILELOG:c: rev=0: search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1061 DBG-DELTAS: FILELOG:c: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1062 added 3 changesets with 3 changes to 3 files
1062 added 3 changesets with 3 changes to 3 files
1063 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1063 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1064 (run 'hg update' to get a working copy)
1064 (run 'hg update' to get a working copy)
1065
1065
@@ -1,346 +1,346 b''
1 ====================================
1 ====================================
2 Test delta choice with sparse revlog
2 Test delta choice with sparse revlog
3 ====================================
3 ====================================
4
4
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
6 to general an appropriate file, so we test with a single file instead. The
6 to general an appropriate file, so we test with a single file instead. The
7 goal is to observe intermediate snapshot being created.
7 goal is to observe intermediate snapshot being created.
8
8
9 We need a large enough file. Part of the content needs to be replaced
9 We need a large enough file. Part of the content needs to be replaced
10 repeatedly while some of it changes rarely.
10 repeatedly while some of it changes rarely.
11
11
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
13
13
14 $ expectedhash=`cat "$bundlepath".md5`
14 $ expectedhash=`cat "$bundlepath".md5`
15
15
16 #if slow
16 #if slow
17
17
18 $ if [ ! -f "$bundlepath" ]; then
18 $ if [ ! -f "$bundlepath" ]; then
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
20 > fi
20 > fi
21
21
22 #else
22 #else
23
23
24 $ if [ ! -f "$bundlepath" ]; then
24 $ if [ ! -f "$bundlepath" ]; then
25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
26 > exit 80
26 > exit 80
27 > fi
27 > fi
28
28
29 #endif
29 #endif
30
30
31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
32 $ if [ "$currenthash" != "$expectedhash" ]; then
32 $ if [ "$currenthash" != "$expectedhash" ]; then
33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
34 > exit 80
34 > exit 80
35 > fi
35 > fi
36
36
37 $ cat >> $HGRCPATH << EOF
37 $ cat >> $HGRCPATH << EOF
38 > [format]
38 > [format]
39 > sparse-revlog = yes
39 > sparse-revlog = yes
40 > maxchainlen = 15
40 > maxchainlen = 15
41 > [storage]
41 > [storage]
42 > revlog.optimize-delta-parent-choice = yes
42 > revlog.optimize-delta-parent-choice = yes
43 > revlog.reuse-external-delta = no
43 > revlog.reuse-external-delta = no
44 > EOF
44 > EOF
45 $ hg init sparse-repo
45 $ hg init sparse-repo
46 $ cd sparse-repo
46 $ cd sparse-repo
47 $ hg unbundle $bundlepath
47 $ hg unbundle $bundlepath
48 adding changesets
48 adding changesets
49 adding manifests
49 adding manifests
50 adding file changes
50 adding file changes
51 added 5001 changesets with 5001 changes to 1 files (+89 heads)
51 added 5001 changesets with 5001 changes to 1 files (+89 heads)
52 new changesets 9706f5af64f4:d9032adc8114 (5001 drafts)
52 new changesets 9706f5af64f4:d9032adc8114 (5001 drafts)
53 (run 'hg heads' to see heads, 'hg merge' to merge)
53 (run 'hg heads' to see heads, 'hg merge' to merge)
54 $ hg up
54 $ hg up
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 updated to "d9032adc8114: commit #5000"
56 updated to "d9032adc8114: commit #5000"
57 89 other heads for branch "default"
57 89 other heads for branch "default"
58
58
59 $ hg log --stat -r 0:3
59 $ hg log --stat -r 0:3
60 changeset: 0:9706f5af64f4
60 changeset: 0:9706f5af64f4
61 user: test
61 user: test
62 date: Thu Jan 01 00:00:00 1970 +0000
62 date: Thu Jan 01 00:00:00 1970 +0000
63 summary: initial commit
63 summary: initial commit
64
64
65 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
65 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
66 1 files changed, 10500 insertions(+), 0 deletions(-)
66 1 files changed, 10500 insertions(+), 0 deletions(-)
67
67
68 changeset: 1:724907deaa5e
68 changeset: 1:724907deaa5e
69 user: test
69 user: test
70 date: Thu Jan 01 00:00:00 1970 +0000
70 date: Thu Jan 01 00:00:00 1970 +0000
71 summary: commit #1
71 summary: commit #1
72
72
73 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
73 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
74 1 files changed, 534 insertions(+), 534 deletions(-)
74 1 files changed, 534 insertions(+), 534 deletions(-)
75
75
76 changeset: 2:62c41bce3e5d
76 changeset: 2:62c41bce3e5d
77 user: test
77 user: test
78 date: Thu Jan 01 00:00:00 1970 +0000
78 date: Thu Jan 01 00:00:00 1970 +0000
79 summary: commit #2
79 summary: commit #2
80
80
81 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
81 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
82 1 files changed, 534 insertions(+), 534 deletions(-)
82 1 files changed, 534 insertions(+), 534 deletions(-)
83
83
84 changeset: 3:348a9cbd6959
84 changeset: 3:348a9cbd6959
85 user: test
85 user: test
86 date: Thu Jan 01 00:00:00 1970 +0000
86 date: Thu Jan 01 00:00:00 1970 +0000
87 summary: commit #3
87 summary: commit #3
88
88
89 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
89 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
90 1 files changed, 534 insertions(+), 534 deletions(-)
90 1 files changed, 534 insertions(+), 534 deletions(-)
91
91
92
92
93 $ f -s .hg/store/data/*.d
93 $ f -s .hg/store/data/*.d
94 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
94 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
95 $ hg debugrevlog *
95 $ hg debugrevlog *
96 format : 1
96 format : 1
97 flags : generaldelta
97 flags : generaldelta
98
98
99 revisions : 5001
99 revisions : 5001
100 merges : 625 (12.50%)
100 merges : 625 (12.50%)
101 normal : 4376 (87.50%)
101 normal : 4376 (87.50%)
102 revisions : 5001
102 revisions : 5001
103 empty : 0 ( 0.00%)
103 empty : 0 ( 0.00%)
104 text : 0 (100.00%)
104 text : 0 (100.00%)
105 delta : 0 (100.00%)
105 delta : 0 (100.00%)
106 snapshot : 383 ( 7.66%)
106 snapshot : 383 ( 7.66%)
107 lvl-0 : 3 ( 0.06%)
107 lvl-0 : 3 ( 0.06%)
108 lvl-1 : 18 ( 0.36%)
108 lvl-1 : 18 ( 0.36%)
109 lvl-2 : 62 ( 1.24%)
109 lvl-2 : 62 ( 1.24%)
110 lvl-3 : 108 ( 2.16%)
110 lvl-3 : 108 ( 2.16%)
111 lvl-4 : 191 ( 3.82%)
111 lvl-4 : 191 ( 3.82%)
112 lvl-5 : 1 ( 0.02%)
112 lvl-5 : 1 ( 0.02%)
113 deltas : 4618 (92.34%)
113 deltas : 4618 (92.34%)
114 revision size : 58616973
114 revision size : 58616973
115 snapshot : 9247844 (15.78%)
115 snapshot : 9247844 (15.78%)
116 lvl-0 : 539532 ( 0.92%)
116 lvl-0 : 539532 ( 0.92%)
117 lvl-1 : 1467743 ( 2.50%)
117 lvl-1 : 1467743 ( 2.50%)
118 lvl-2 : 1873820 ( 3.20%)
118 lvl-2 : 1873820 ( 3.20%)
119 lvl-3 : 2326874 ( 3.97%)
119 lvl-3 : 2326874 ( 3.97%)
120 lvl-4 : 3029118 ( 5.17%)
120 lvl-4 : 3029118 ( 5.17%)
121 lvl-5 : 10757 ( 0.02%)
121 lvl-5 : 10757 ( 0.02%)
122 deltas : 49369129 (84.22%)
122 deltas : 49369129 (84.22%)
123
123
124 chunks : 5001
124 chunks : 5001
125 0x28 : 5001 (100.00%)
125 0x28 : 5001 (100.00%)
126 chunks size : 58616973
126 chunks size : 58616973
127 0x28 : 58616973 (100.00%)
127 0x28 : 58616973 (100.00%)
128
128
129 avg chain length : 9
129 avg chain length : 9
130 max chain length : 15
130 max chain length : 15
131 max chain reach : 27366701
131 max chain reach : 27366701
132 compression ratio : 29
132 compression ratio : 29
133
133
134 uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
134 uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
135 full revision size (min/max/avg) : 179288 / 180786 / 179844
135 full revision size (min/max/avg) : 179288 / 180786 / 179844
136 inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
136 inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
137 level-1 (min/max/avg) : 13905 / 169507 / 81541
137 level-1 (min/max/avg) : 13905 / 169507 / 81541
138 level-2 (min/max/avg) : 10887 / 83873 / 30222
138 level-2 (min/max/avg) : 10887 / 83873 / 30222
139 level-3 (min/max/avg) : 10911 / 43047 / 21545
139 level-3 (min/max/avg) : 10911 / 43047 / 21545
140 level-4 (min/max/avg) : 10838 / 21390 / 15859
140 level-4 (min/max/avg) : 10838 / 21390 / 15859
141 level-5 (min/max/avg) : 10757 / 10757 / 10757
141 level-5 (min/max/avg) : 10757 / 10757 / 10757
142 delta size (min/max/avg) : 9672 / 108072 / 10690
142 delta size (min/max/avg) : 9672 / 108072 / 10690
143
143
144 deltas against prev : 3906 (84.58%)
144 deltas against prev : 3906 (84.58%)
145 where prev = p1 : 3906 (100.00%)
145 where prev = p1 : 3906 (100.00%)
146 where prev = p2 : 0 ( 0.00%)
146 where prev = p2 : 0 ( 0.00%)
147 other : 0 ( 0.00%)
147 other : 0 ( 0.00%)
148 deltas against p1 : 649 (14.05%)
148 deltas against p1 : 649 (14.05%)
149 deltas against p2 : 63 ( 1.36%)
149 deltas against p2 : 63 ( 1.36%)
150 deltas against other : 0 ( 0.00%)
150 deltas against other : 0 ( 0.00%)
151
151
152
152
153 Test `debug-delta-find`
153 Test `debug-delta-find`
154 -----------------------
154 -----------------------
155
155
156 $ ls -1
156 $ ls -1
157 SPARSE-REVLOG-TEST-FILE
157 SPARSE-REVLOG-TEST-FILE
158 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
158 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
159 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
159 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
160 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
160 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
161 DBG-DELTAS-SEARCH: SEARCH rev=4971
161 DBG-DELTAS-SEARCH: SEARCH rev=4971
162 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
162 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
163 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
163 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
164 DBG-DELTAS-SEARCH: type=snapshot-4
164 DBG-DELTAS-SEARCH: type=snapshot-4
165 DBG-DELTAS-SEARCH: size=18296
165 DBG-DELTAS-SEARCH: size=18296
166 DBG-DELTAS-SEARCH: base=4930
166 DBG-DELTAS-SEARCH: base=4930
167 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
167 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
168 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
168 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
169 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
169 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
170 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
170 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
171 DBG-DELTAS-SEARCH: type=snapshot-4
171 DBG-DELTAS-SEARCH: type=snapshot-4
172 DBG-DELTAS-SEARCH: size=19179
172 DBG-DELTAS-SEARCH: size=19179
173 DBG-DELTAS-SEARCH: base=4930
173 DBG-DELTAS-SEARCH: base=4930
174 DBG-DELTAS-SEARCH: TOO-HIGH
174 DBG-DELTAS-SEARCH: TOO-HIGH
175 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
175 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
176 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
176 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
177 DBG-DELTAS-SEARCH: type=snapshot-3
177 DBG-DELTAS-SEARCH: type=snapshot-3
178 DBG-DELTAS-SEARCH: size=39228
178 DBG-DELTAS-SEARCH: size=39228
179 DBG-DELTAS-SEARCH: base=4799
179 DBG-DELTAS-SEARCH: base=4799
180 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
180 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
181 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
181 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
182 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
182 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
183 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
183 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
184 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
184 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
185 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
185 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
186 DBG-DELTAS-SEARCH: type=snapshot-2
186 DBG-DELTAS-SEARCH: type=snapshot-2
187 DBG-DELTAS-SEARCH: size=50213
187 DBG-DELTAS-SEARCH: size=50213
188 DBG-DELTAS-SEARCH: base=4623
188 DBG-DELTAS-SEARCH: base=4623
189 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
189 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
190 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
190 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
191 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
191 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
192 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
192 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
193
193
194 $ cat << EOF >>.hg/hgrc
194 $ cat << EOF >>.hg/hgrc
195 > [storage]
195 > [storage]
196 > revlog.optimize-delta-parent-choice = no
196 > revlog.optimize-delta-parent-choice = no
197 > revlog.reuse-external-delta = yes
197 > revlog.reuse-external-delta = yes
198 > EOF
198 > EOF
199
199
200 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
200 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
201 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
201 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
202 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
202 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
203 DBG-DELTAS-SEARCH: SEARCH rev=4971
203 DBG-DELTAS-SEARCH: SEARCH rev=4971
204 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
204 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
205 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
205 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
206 DBG-DELTAS-SEARCH: type=snapshot-4
206 DBG-DELTAS-SEARCH: type=snapshot-4
207 DBG-DELTAS-SEARCH: size=18296
207 DBG-DELTAS-SEARCH: size=18296
208 DBG-DELTAS-SEARCH: base=4930
208 DBG-DELTAS-SEARCH: base=4930
209 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
209 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
210 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
210 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
211 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
211 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
212 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
212 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
213 DBG-DELTAS-SEARCH: type=snapshot-4
213 DBG-DELTAS-SEARCH: type=snapshot-4
214 DBG-DELTAS-SEARCH: size=19179
214 DBG-DELTAS-SEARCH: size=19179
215 DBG-DELTAS-SEARCH: base=4930
215 DBG-DELTAS-SEARCH: base=4930
216 DBG-DELTAS-SEARCH: TOO-HIGH
216 DBG-DELTAS-SEARCH: TOO-HIGH
217 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
217 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
218 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
218 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
219 DBG-DELTAS-SEARCH: type=snapshot-3
219 DBG-DELTAS-SEARCH: type=snapshot-3
220 DBG-DELTAS-SEARCH: size=39228
220 DBG-DELTAS-SEARCH: size=39228
221 DBG-DELTAS-SEARCH: base=4799
221 DBG-DELTAS-SEARCH: base=4799
222 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
222 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
223 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
223 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
224 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
224 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
225 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
225 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
226 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
226 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
227 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
227 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
228 DBG-DELTAS-SEARCH: type=snapshot-2
228 DBG-DELTAS-SEARCH: type=snapshot-2
229 DBG-DELTAS-SEARCH: size=50213
229 DBG-DELTAS-SEARCH: size=50213
230 DBG-DELTAS-SEARCH: base=4623
230 DBG-DELTAS-SEARCH: base=4623
231 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
231 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
232 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
232 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
233 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
233 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
234 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
234 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
235 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
235 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
236 DBG-DELTAS-SEARCH: SEARCH rev=4971
236 DBG-DELTAS-SEARCH: SEARCH rev=4971
237 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
237 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
238 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
238 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
239 DBG-DELTAS-SEARCH: type=snapshot-3
239 DBG-DELTAS-SEARCH: type=snapshot-3
240 DBG-DELTAS-SEARCH: size=39228
240 DBG-DELTAS-SEARCH: size=39228
241 DBG-DELTAS-SEARCH: base=4799
241 DBG-DELTAS-SEARCH: base=4799
242 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
242 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
243 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
243 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
244 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
244 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
245 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
245 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=1 - search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
246 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
246 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
247 DBG-DELTAS-SEARCH: SEARCH rev=4971
247 DBG-DELTAS-SEARCH: SEARCH rev=4971
248 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
248 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
249 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
249 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
250 DBG-DELTAS-SEARCH: type=snapshot-4
250 DBG-DELTAS-SEARCH: type=snapshot-4
251 DBG-DELTAS-SEARCH: size=18296
251 DBG-DELTAS-SEARCH: size=18296
252 DBG-DELTAS-SEARCH: base=4930
252 DBG-DELTAS-SEARCH: base=4930
253 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
253 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
254 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
254 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
255 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
255 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
256 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
256 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
257 DBG-DELTAS-SEARCH: type=snapshot-4
257 DBG-DELTAS-SEARCH: type=snapshot-4
258 DBG-DELTAS-SEARCH: size=19179
258 DBG-DELTAS-SEARCH: size=19179
259 DBG-DELTAS-SEARCH: base=4930
259 DBG-DELTAS-SEARCH: base=4930
260 DBG-DELTAS-SEARCH: TOO-HIGH
260 DBG-DELTAS-SEARCH: TOO-HIGH
261 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
261 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
262 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
262 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
263 DBG-DELTAS-SEARCH: type=snapshot-3
263 DBG-DELTAS-SEARCH: type=snapshot-3
264 DBG-DELTAS-SEARCH: size=39228
264 DBG-DELTAS-SEARCH: size=39228
265 DBG-DELTAS-SEARCH: base=4799
265 DBG-DELTAS-SEARCH: base=4799
266 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
266 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
267 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
267 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
268 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
268 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
269 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
269 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
270 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
270 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
271 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
271 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
272 DBG-DELTAS-SEARCH: type=snapshot-2
272 DBG-DELTAS-SEARCH: type=snapshot-2
273 DBG-DELTAS-SEARCH: size=50213
273 DBG-DELTAS-SEARCH: size=50213
274 DBG-DELTAS-SEARCH: base=4623
274 DBG-DELTAS-SEARCH: base=4623
275 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
275 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
276 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
276 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
277 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
277 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
278 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
278 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
279 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
279 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
280 DBG-DELTAS-SEARCH: SEARCH rev=4971
280 DBG-DELTAS-SEARCH: SEARCH rev=4971
281 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
281 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
282 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
282 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
283 DBG-DELTAS-SEARCH: type=snapshot-4
283 DBG-DELTAS-SEARCH: type=snapshot-4
284 DBG-DELTAS-SEARCH: size=18296
284 DBG-DELTAS-SEARCH: size=18296
285 DBG-DELTAS-SEARCH: base=4930
285 DBG-DELTAS-SEARCH: base=4930
286 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
286 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
287 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
287 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
288 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
288 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
290 DBG-DELTAS-SEARCH: type=snapshot-4
290 DBG-DELTAS-SEARCH: type=snapshot-4
291 DBG-DELTAS-SEARCH: size=19179
291 DBG-DELTAS-SEARCH: size=19179
292 DBG-DELTAS-SEARCH: base=4930
292 DBG-DELTAS-SEARCH: base=4930
293 DBG-DELTAS-SEARCH: TOO-HIGH
293 DBG-DELTAS-SEARCH: TOO-HIGH
294 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
294 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
295 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
295 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
296 DBG-DELTAS-SEARCH: type=snapshot-3
296 DBG-DELTAS-SEARCH: type=snapshot-3
297 DBG-DELTAS-SEARCH: size=39228
297 DBG-DELTAS-SEARCH: size=39228
298 DBG-DELTAS-SEARCH: base=4799
298 DBG-DELTAS-SEARCH: base=4799
299 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
299 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
300 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
300 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
301 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
301 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
302 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
302 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
303 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
303 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
304 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
304 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
305 DBG-DELTAS-SEARCH: type=snapshot-2
305 DBG-DELTAS-SEARCH: type=snapshot-2
306 DBG-DELTAS-SEARCH: size=50213
306 DBG-DELTAS-SEARCH: size=50213
307 DBG-DELTAS-SEARCH: base=4623
307 DBG-DELTAS-SEARCH: base=4623
308 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
308 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
309 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
309 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
310 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
310 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
311 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
311 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
312 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
312 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
313 DBG-DELTAS-SEARCH: SEARCH rev=4971
313 DBG-DELTAS-SEARCH: SEARCH rev=4971
314 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
314 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
316 DBG-DELTAS-SEARCH: type=snapshot-4
316 DBG-DELTAS-SEARCH: type=snapshot-4
317 DBG-DELTAS-SEARCH: size=18296
317 DBG-DELTAS-SEARCH: size=18296
318 DBG-DELTAS-SEARCH: base=4930
318 DBG-DELTAS-SEARCH: base=4930
319 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
319 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
321 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
321 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
322 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
322 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
323 DBG-DELTAS-SEARCH: type=snapshot-4
323 DBG-DELTAS-SEARCH: type=snapshot-4
324 DBG-DELTAS-SEARCH: size=19179
324 DBG-DELTAS-SEARCH: size=19179
325 DBG-DELTAS-SEARCH: base=4930
325 DBG-DELTAS-SEARCH: base=4930
326 DBG-DELTAS-SEARCH: TOO-HIGH
326 DBG-DELTAS-SEARCH: TOO-HIGH
327 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
327 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
328 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
328 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
329 DBG-DELTAS-SEARCH: type=snapshot-3
329 DBG-DELTAS-SEARCH: type=snapshot-3
330 DBG-DELTAS-SEARCH: size=39228
330 DBG-DELTAS-SEARCH: size=39228
331 DBG-DELTAS-SEARCH: base=4799
331 DBG-DELTAS-SEARCH: base=4799
332 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
332 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
333 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
333 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
334 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
334 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
335 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
335 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
336 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
336 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
337 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
337 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
338 DBG-DELTAS-SEARCH: type=snapshot-2
338 DBG-DELTAS-SEARCH: type=snapshot-2
339 DBG-DELTAS-SEARCH: size=50213
339 DBG-DELTAS-SEARCH: size=50213
340 DBG-DELTAS-SEARCH: base=4623
340 DBG-DELTAS-SEARCH: base=4623
341 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
341 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
342 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
342 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
343 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
343 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
344 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
344 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: delta-base=4930 is-cached=0 - search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
345
345
346 $ cd ..
346 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now