##// END OF EJS Templates
delta-find: properly report full snapshot used from cache as such...
marmoute -
r50676:d57b966c default
parent child Browse files
Show More
@@ -1,1494 +1,1502 b''
1 # revlogdeltas.py - Logic around delta computation for revlog
1 # revlogdeltas.py - Logic around delta computation for revlog
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2018 Octobus <contact@octobus.net>
4 # Copyright 2018 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """Helper class to compute deltas stored inside revlogs"""
8 """Helper class to compute deltas stored inside revlogs"""
9
9
10
10
11 import collections
11 import collections
12 import struct
12 import struct
13
13
14 # import stuff from node for others to import from revlog
14 # import stuff from node for others to import from revlog
15 from ..node import nullrev
15 from ..node import nullrev
16 from ..i18n import _
16 from ..i18n import _
17 from ..pycompat import getattr
17 from ..pycompat import getattr
18
18
19 from .constants import (
19 from .constants import (
20 COMP_MODE_DEFAULT,
20 COMP_MODE_DEFAULT,
21 COMP_MODE_INLINE,
21 COMP_MODE_INLINE,
22 COMP_MODE_PLAIN,
22 COMP_MODE_PLAIN,
23 DELTA_BASE_REUSE_FORCE,
23 DELTA_BASE_REUSE_FORCE,
24 DELTA_BASE_REUSE_NO,
24 DELTA_BASE_REUSE_NO,
25 KIND_CHANGELOG,
25 KIND_CHANGELOG,
26 KIND_FILELOG,
26 KIND_FILELOG,
27 KIND_MANIFESTLOG,
27 KIND_MANIFESTLOG,
28 REVIDX_ISCENSORED,
28 REVIDX_ISCENSORED,
29 REVIDX_RAWTEXT_CHANGING_FLAGS,
29 REVIDX_RAWTEXT_CHANGING_FLAGS,
30 )
30 )
31
31
32 from ..thirdparty import attr
32 from ..thirdparty import attr
33
33
34 from .. import (
34 from .. import (
35 error,
35 error,
36 mdiff,
36 mdiff,
37 util,
37 util,
38 )
38 )
39
39
40 from . import flagutil
40 from . import flagutil
41
41
42 # maximum <delta-chain-data>/<revision-text-length> ratio
42 # maximum <delta-chain-data>/<revision-text-length> ratio
43 LIMIT_DELTA2TEXT = 2
43 LIMIT_DELTA2TEXT = 2
44
44
45
45
46 class _testrevlog:
46 class _testrevlog:
47 """minimalist fake revlog to use in doctests"""
47 """minimalist fake revlog to use in doctests"""
48
48
49 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
49 def __init__(self, data, density=0.5, mingap=0, snapshot=()):
50 """data is an list of revision payload boundaries"""
50 """data is an list of revision payload boundaries"""
51 self._data = data
51 self._data = data
52 self._srdensitythreshold = density
52 self._srdensitythreshold = density
53 self._srmingapsize = mingap
53 self._srmingapsize = mingap
54 self._snapshot = set(snapshot)
54 self._snapshot = set(snapshot)
55 self.index = None
55 self.index = None
56
56
57 def start(self, rev):
57 def start(self, rev):
58 if rev == nullrev:
58 if rev == nullrev:
59 return 0
59 return 0
60 if rev == 0:
60 if rev == 0:
61 return 0
61 return 0
62 return self._data[rev - 1]
62 return self._data[rev - 1]
63
63
64 def end(self, rev):
64 def end(self, rev):
65 if rev == nullrev:
65 if rev == nullrev:
66 return 0
66 return 0
67 return self._data[rev]
67 return self._data[rev]
68
68
69 def length(self, rev):
69 def length(self, rev):
70 return self.end(rev) - self.start(rev)
70 return self.end(rev) - self.start(rev)
71
71
72 def __len__(self):
72 def __len__(self):
73 return len(self._data)
73 return len(self._data)
74
74
75 def issnapshot(self, rev):
75 def issnapshot(self, rev):
76 if rev == nullrev:
76 if rev == nullrev:
77 return True
77 return True
78 return rev in self._snapshot
78 return rev in self._snapshot
79
79
80
80
def slicechunk(revlog, revs, targetsize=None):
    """slice revs to reduce the amount of unrelated data to be read from disk.

    ``revs`` is sliced into groups that should be read in one time.
    Assume that revs are sorted.

    The initial chunk is sliced until the overall density (payload/chunks-span
    ratio) is above `revlog._srdensitythreshold`. No gap smaller than
    `revlog._srmingapsize` is skipped.

    If `targetsize` is set, no chunk larger than `targetsize` will be yielded.
    For consistency with other slicing choices, this limit won't go lower than
    `revlog._srmingapsize`.

    If individual revision chunks are larger than this limit, they will still
    be raised individually.

    >>> data = [
    ...     5,  #00 (5)
    ...     10,  #01 (5)
    ...     12,  #02 (2)
    ...     12,  #03 (empty)
    ...     27,  #04 (15)
    ...     31,  #05 (4)
    ...     31,  #06 (empty)
    ...     42,  #07 (11)
    ...     47,  #08 (5)
    ...     47,  #09 (empty)
    ...     48,  #10 (1)
    ...     51,  #11 (3)
    ...     74,  #12 (23)
    ...     85,  #13 (11)
    ...     86,  #14 (1)
    ...     91,  #15 (5)
    ... ]
    >>> revlog = _testrevlog(data, snapshot=range(16))

    >>> list(slicechunk(revlog, list(range(16))))
    [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]]
    >>> list(slicechunk(revlog, [0, 15]))
    [[0], [15]]
    >>> list(slicechunk(revlog, [0, 11, 15]))
    [[0], [11], [15]]
    >>> list(slicechunk(revlog, [0, 11, 13, 15]))
    [[0], [11, 13, 15]]
    >>> list(slicechunk(revlog, [1, 2, 3, 5, 8, 10, 11, 14]))
    [[1, 2], [5, 8, 10, 11], [14]]

    Slicing with a maximum chunk size
    >>> list(slicechunk(revlog, [0, 11, 13, 15], targetsize=15))
    [[0], [11], [13], [15]]
    >>> list(slicechunk(revlog, [0, 11, 13, 15], targetsize=20))
    [[0], [11], [13, 15]]

    Slicing involving nullrev
    >>> list(slicechunk(revlog, [-1, 0, 11, 13, 15], targetsize=20))
    [[-1, 0], [11], [13, 15]]
    >>> list(slicechunk(revlog, [-1, 13, 15], targetsize=5))
    [[-1], [13], [15]]
    """
    if targetsize is not None:
        targetsize = max(targetsize, revlog._srmingapsize)
    # targetsize should not be specified when evaluating delta candidates:
    # * targetsize is used to ensure we stay within specification when reading,
    # use the index's native (C) density slicing when available, otherwise the
    # pure-Python implementation below
    densityslicing = getattr(revlog.index, 'slicechunktodensity', None)
    if densityslicing is None:
        densityslicing = lambda x, y, z: _slicechunktodensity(revlog, x, y, z)
    # first slice by density, then enforce the maximum chunk size on each
    # resulting group
    for chunk in densityslicing(
        revs, revlog._srdensitythreshold, revlog._srmingapsize
    ):
        for subchunk in _slicechunktosize(revlog, chunk, targetsize):
            yield subchunk
153
153
154
154
def _slicechunktosize(revlog, revs, targetsize=None):
    """slice revs to match the target size

    This is intended to be used on chunks that density slicing selected but
    that are still too large compared to the read guarantee of revlog. This
    might happen when "minimal gap size" interrupted the slicing or when
    chains are built in a way that creates large blocks next to each other.

    >>> data = [
    ...     3,  #0 (3)
    ...     5,  #1 (2)
    ...     6,  #2 (1)
    ...     8,  #3 (2)
    ...     8,  #4 (empty)
    ...     11, #5 (3)
    ...     12, #6 (1)
    ...     13, #7 (1)
    ...     14, #8 (1)
    ... ]

    == All snapshots cases ==
    >>> revlog = _testrevlog(data, snapshot=range(9))

    Cases where chunk is already small enough
    >>> list(_slicechunktosize(revlog, [0], 3))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], 3))
    [[6, 7]]
    >>> list(_slicechunktosize(revlog, [0], None))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], None))
    [[6, 7]]

    cases where we need actual slicing
    >>> list(_slicechunktosize(revlog, [0, 1], 3))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 3))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [1, 2, 3], 3))
    [[1, 2], [3]]
    >>> list(_slicechunktosize(revlog, [3, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [5, 6, 7, 8], 3))
    [[5], [6, 7, 8]]
    >>> list(_slicechunktosize(revlog, [0, 1, 2, 3, 4, 5, 6, 7, 8], 3))
    [[0], [1, 2], [3], [5], [6, 7, 8]]

    Case with too large individual chunk (must return valid chunk)
    >>> list(_slicechunktosize(revlog, [0, 1], 2))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 1))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 2))
    [[3], [5]]

    == No Snapshot cases ==
    >>> revlog = _testrevlog(data)

    Cases where chunk is already small enough
    >>> list(_slicechunktosize(revlog, [0], 3))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], 3))
    [[6, 7]]
    >>> list(_slicechunktosize(revlog, [0], None))
    [[0]]
    >>> list(_slicechunktosize(revlog, [6, 7], None))
    [[6, 7]]

    cases where we need actual slicing
    >>> list(_slicechunktosize(revlog, [0, 1], 3))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 3))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [1, 2, 3], 3))
    [[1], [2, 3]]
    >>> list(_slicechunktosize(revlog, [3, 5], 3))
    [[3], [5]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 3))
    [[3], [4, 5]]
    >>> list(_slicechunktosize(revlog, [5, 6, 7, 8], 3))
    [[5], [6, 7, 8]]
    >>> list(_slicechunktosize(revlog, [0, 1, 2, 3, 4, 5, 6, 7, 8], 3))
    [[0], [1, 2], [3], [5], [6, 7, 8]]

    Case with too large individual chunk (must return valid chunk)
    >>> list(_slicechunktosize(revlog, [0, 1], 2))
    [[0], [1]]
    >>> list(_slicechunktosize(revlog, [1, 3], 1))
    [[1], [3]]
    >>> list(_slicechunktosize(revlog, [3, 4, 5], 2))
    [[3], [5]]

    == mixed case ==
    >>> revlog = _testrevlog(data, snapshot=[0, 1, 2])
    >>> list(_slicechunktosize(revlog, list(range(9)), 5))
    [[0, 1], [2], [3, 4, 5], [6, 7, 8]]
    """
    assert targetsize is None or 0 <= targetsize
    startdata = revlog.start(revs[0])
    enddata = revlog.end(revs[-1])
    fullspan = enddata - startdata
    # fast path: the whole chunk already fits within the target size
    if targetsize is None or fullspan <= targetsize:
        yield revs
        return

    startrevidx = 0
    endrevidx = 1
    iterrevs = enumerate(revs)
    next(iterrevs)  # skip first rev.
    # first step: get snapshots out of the way
    for idx, r in iterrevs:
        span = revlog.end(r) - startdata
        snapshot = revlog.issnapshot(r)
        if span <= targetsize and snapshot:
            # still a snapshot and still under the limit: extend the chunk
            endrevidx = idx + 1
        else:
            # flush the accumulated snapshot chunk and start a new one at r
            chunk = _trimchunk(revlog, revs, startrevidx, endrevidx)
            if chunk:
                yield chunk
            startrevidx = idx
            startdata = revlog.start(r)
            endrevidx = idx + 1
        if not snapshot:
            # first non-snapshot revision: hand over to binary slicing below
            break

    # for the others, we use binary slicing to quickly converge toward valid
    # chunks (otherwise, we might end up looking for start/end of many
    # revisions). This logic is not looking for the perfect slicing point, it
    # focuses on quickly converging toward valid chunks.
    nbitem = len(revs)
    while (enddata - startdata) > targetsize:
        endrevidx = nbitem
        if nbitem - startrevidx <= 1:
            break  # protect against individual chunk larger than limit
        localenddata = revlog.end(revs[endrevidx - 1])
        span = localenddata - startdata
        # halve the candidate range until the span fits (or only one rev left)
        while span > targetsize:
            if endrevidx - startrevidx <= 1:
                break  # protect against individual chunk larger than limit
            endrevidx -= (endrevidx - startrevidx) // 2
            localenddata = revlog.end(revs[endrevidx - 1])
            span = localenddata - startdata
        chunk = _trimchunk(revlog, revs, startrevidx, endrevidx)
        if chunk:
            yield chunk
        startrevidx = endrevidx
        startdata = revlog.start(revs[startrevidx])

    # emit whatever remains after the last cut
    chunk = _trimchunk(revlog, revs, startrevidx)
    if chunk:
        yield chunk
308
308
309
309
def _slicechunktodensity(revlog, revs, targetdensity=0.5, mingapsize=0):
    """slice revs to reduce the amount of unrelated data to be read from disk.

    ``revs`` is sliced into groups that should be read in one time.
    Assume that revs are sorted.

    The initial chunk is sliced until the overall density (payload/chunks-span
    ratio) is above `targetdensity`. No gap smaller than `mingapsize` is
    skipped.

    >>> revlog = _testrevlog([
    ...     5,  #00 (5)
    ...     10,  #01 (5)
    ...     12,  #02 (2)
    ...     12,  #03 (empty)
    ...     27,  #04 (15)
    ...     31,  #05 (4)
    ...     31,  #06 (empty)
    ...     42,  #07 (11)
    ...     47,  #08 (5)
    ...     47,  #09 (empty)
    ...     48,  #10 (1)
    ...     51,  #11 (3)
    ...     74,  #12 (23)
    ...     85,  #13 (11)
    ...     86,  #14 (1)
    ...     91,  #15 (5)
    ... ])

    >>> list(_slicechunktodensity(revlog, list(range(16))))
    [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]]
    >>> list(_slicechunktodensity(revlog, [0, 15]))
    [[0], [15]]
    >>> list(_slicechunktodensity(revlog, [0, 11, 15]))
    [[0], [11], [15]]
    >>> list(_slicechunktodensity(revlog, [0, 11, 13, 15]))
    [[0], [11, 13, 15]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14]))
    [[1, 2], [5, 8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           mingapsize=20))
    [[1, 2, 3, 5, 8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           targetdensity=0.95))
    [[1, 2], [5], [8, 10, 11], [14]]
    >>> list(_slicechunktodensity(revlog, [1, 2, 3, 5, 8, 10, 11, 14],
    ...                           targetdensity=0.95, mingapsize=12))
    [[1, 2], [5, 8, 10, 11], [14]]
    """
    start = revlog.start
    length = revlog.length

    # a single revision is always dense enough
    if len(revs) <= 1:
        yield revs
        return

    deltachainspan = segmentspan(revlog, revs)

    # the whole span is smaller than the minimum gap we would skip anyway
    if deltachainspan < mingapsize:
        yield revs
        return

    readdata = deltachainspan
    chainpayload = sum(length(r) for r in revs)

    if deltachainspan:
        density = chainpayload / float(deltachainspan)
    else:
        density = 1.0

    if density >= targetdensity:
        yield revs
        return

    # Collect (size, index) of the gaps between consecutive non-empty
    # revisions; the list is later sorted so the largest gaps can be popped
    # from its end.
    gaps = []
    prevend = None
    for i, rev in enumerate(revs):
        revstart = start(rev)
        revlen = length(rev)

        # Skip empty revisions to form larger holes
        if revlen == 0:
            continue

        if prevend is not None:
            gapsize = revstart - prevend
            # only consider holes that are large enough
            if gapsize > mingapsize:
                gaps.append((gapsize, i))

        prevend = revstart + revlen
    # sort the gaps ascending so pop() yields them from largest to smallest
    gaps.sort()

    # Collect the indices of the largest holes until the density is acceptable
    selected = []
    while gaps and density < targetdensity:
        gapsize, gapidx = gaps.pop()

        selected.append(gapidx)

        # removing this gap from the read span improves the density of what
        # remains to be read
        readdata -= gapsize
        if readdata > 0:
            density = chainpayload / float(readdata)
        else:
            density = 1.0
    selected.sort()

    # Cut the revs at collected indices
    previdx = 0
    for idx in selected:

        chunk = _trimchunk(revlog, revs, previdx, idx)
        if chunk:
            yield chunk

        previdx = idx

    chunk = _trimchunk(revlog, revs, previdx)
    if chunk:
        yield chunk
434
434
435
435
436 def _trimchunk(revlog, revs, startidx, endidx=None):
436 def _trimchunk(revlog, revs, startidx, endidx=None):
437 """returns revs[startidx:endidx] without empty trailing revs
437 """returns revs[startidx:endidx] without empty trailing revs
438
438
439 Doctest Setup
439 Doctest Setup
440 >>> revlog = _testrevlog([
440 >>> revlog = _testrevlog([
441 ... 5, #0
441 ... 5, #0
442 ... 10, #1
442 ... 10, #1
443 ... 12, #2
443 ... 12, #2
444 ... 12, #3 (empty)
444 ... 12, #3 (empty)
445 ... 17, #4
445 ... 17, #4
446 ... 21, #5
446 ... 21, #5
447 ... 21, #6 (empty)
447 ... 21, #6 (empty)
448 ... ])
448 ... ])
449
449
450 Contiguous cases:
450 Contiguous cases:
451 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0)
451 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0)
452 [0, 1, 2, 3, 4, 5]
452 [0, 1, 2, 3, 4, 5]
453 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 5)
453 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 5)
454 [0, 1, 2, 3, 4]
454 [0, 1, 2, 3, 4]
455 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 4)
455 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 0, 4)
456 [0, 1, 2]
456 [0, 1, 2]
457 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 2, 4)
457 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 2, 4)
458 [2]
458 [2]
459 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3)
459 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3)
460 [3, 4, 5]
460 [3, 4, 5]
461 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3, 5)
461 >>> _trimchunk(revlog, [0, 1, 2, 3, 4, 5, 6], 3, 5)
462 [3, 4]
462 [3, 4]
463
463
464 Discontiguous cases:
464 Discontiguous cases:
465 >>> _trimchunk(revlog, [1, 3, 5, 6], 0)
465 >>> _trimchunk(revlog, [1, 3, 5, 6], 0)
466 [1, 3, 5]
466 [1, 3, 5]
467 >>> _trimchunk(revlog, [1, 3, 5, 6], 0, 2)
467 >>> _trimchunk(revlog, [1, 3, 5, 6], 0, 2)
468 [1]
468 [1]
469 >>> _trimchunk(revlog, [1, 3, 5, 6], 1, 3)
469 >>> _trimchunk(revlog, [1, 3, 5, 6], 1, 3)
470 [3, 5]
470 [3, 5]
471 >>> _trimchunk(revlog, [1, 3, 5, 6], 1)
471 >>> _trimchunk(revlog, [1, 3, 5, 6], 1)
472 [3, 5]
472 [3, 5]
473 """
473 """
474 length = revlog.length
474 length = revlog.length
475
475
476 if endidx is None:
476 if endidx is None:
477 endidx = len(revs)
477 endidx = len(revs)
478
478
479 # If we have a non-emtpy delta candidate, there are nothing to trim
479 # If we have a non-emtpy delta candidate, there are nothing to trim
480 if revs[endidx - 1] < len(revlog):
480 if revs[endidx - 1] < len(revlog):
481 # Trim empty revs at the end, except the very first revision of a chain
481 # Trim empty revs at the end, except the very first revision of a chain
482 while (
482 while (
483 endidx > 1 and endidx > startidx and length(revs[endidx - 1]) == 0
483 endidx > 1 and endidx > startidx and length(revs[endidx - 1]) == 0
484 ):
484 ):
485 endidx -= 1
485 endidx -= 1
486
486
487 return revs[startidx:endidx]
487 return revs[startidx:endidx]
488
488
489
489
def segmentspan(revlog, revs):
    """Get the byte span of a segment of revisions

    revs is a sorted array of revision numbers

    >>> revlog = _testrevlog([
    ...  5, #0
    ...  10, #1
    ...  12, #2
    ...  12, #3 (empty)
    ...  17, #4
    ... ])

    >>> segmentspan(revlog, [0, 1, 2, 3, 4])
    17
    >>> segmentspan(revlog, [0, 4])
    17
    >>> segmentspan(revlog, [3, 4])
    5
    >>> segmentspan(revlog, [1, 2, 3,])
    7
    >>> segmentspan(revlog, [1, 3])
    7
    """
    # an empty segment spans no bytes at all
    if not revs:
        return 0
    # distance from the start of the first revision to the end of the last one
    return revlog.end(revs[-1]) - revlog.start(revs[0])
518
518
519
519
def _textfromdelta(fh, revlog, baserev, delta, p1, p2, flags, expectednode):
    """build full text from a (base, delta) pair and other metadata

    ``fh`` is a file handle passed through to ``revlog.revision`` when the
    base text needs to be read; ``baserev`` is the revision the delta applies
    to; ``p1``/``p2``/``flags``/``expectednode`` are used for hash
    verification and censorship checks.  Raises ``error.StorageError`` if an
    uncensored node carries the censored flag; re-raises
    ``error.CensoredNodeError`` unless the censored flag is set.
    """
    # special case deltas which replace entire base; no need to decode
    # base revision. this neatly avoids censored bases, which throw when
    # they're decoded.
    hlen = struct.calcsize(b">lll")
    if delta[:hlen] == mdiff.replacediffheader(
        revlog.rawsize(baserev), len(delta) - hlen
    ):
        # full-replacement delta: the payload after the header IS the text
        fulltext = delta[hlen:]
    else:
        # deltabase is rawtext before changed by flag processors, which is
        # equivalent to non-raw text
        basetext = revlog.revision(baserev, _df=fh)
        fulltext = mdiff.patch(basetext, delta)

    try:
        validatehash = flagutil.processflagsraw(revlog, fulltext, flags)
        if validatehash:
            revlog.checkhash(fulltext, expectednode, p1=p1, p2=p2)
        if flags & REVIDX_ISCENSORED:
            # the flag claims censorship but checkhash did not raise
            # CensoredNodeError, so the node is not actually censored
            raise error.StorageError(
                _(b'node %s is not censored') % expectednode
            )
    except error.CensoredNodeError:
        # must pass the censored index flag to add censored revisions
        if not flags & REVIDX_ISCENSORED:
            raise
    return fulltext
549
549
550
550
@attr.s(slots=True, frozen=True)
class _deltainfo:
    """Immutable record describing one candidate delta for a revision."""

    # distance from the delta base revision (I/O span to read the chain)
    distance = attr.ib()
    # length of the delta payload itself
    deltalen = attr.ib()
    # (header, compressed-delta) pair; header b'u' means uncompressed
    data = attr.ib()
    # revision number used as delta base
    base = attr.ib()
    # first revision of the delta chain this delta extends
    chainbase = attr.ib()
    # number of deltas in the resulting chain
    chainlen = attr.ib()
    # cumulated compressed size of all deltas in the chain
    compresseddeltalen = attr.ib()
    # snapshot depth if this delta creates a snapshot, None otherwise
    snapshotdepth = attr.ib()
561
561
562
562
def drop_u_compression(delta):
    """turn a "u" (no-compression) delta into no-compression without header

    This is useful for revlog formats that have a better compression method.
    """
    # only valid on deltas explicitly marked as uncompressed
    assert delta.data[0] == b'u', delta.data[0]
    return _deltainfo(
        delta.distance,
        delta.deltalen - 1,  # one byte shorter: the b'u' header is dropped
        (b'', delta.data[1]),
        delta.base,
        delta.chainbase,
        delta.chainlen,
        delta.compresseddeltalen,
        delta.snapshotdepth,
    )
579
579
580
580
def is_good_delta_info(revlog, deltainfo, revinfo):
    """Returns True if the given delta is good. Good means that it is within
    the disk span, disk size, and chain length bounds that we know to be
    performant."""
    if deltainfo is None:
        return False

    # the cached delta base can be forcibly accepted (e.g. during some
    # conversion/debug operations)
    if (
        revinfo.cachedelta is not None
        and deltainfo.base == revinfo.cachedelta[0]
        and revinfo.cachedelta[2] == DELTA_BASE_REUSE_FORCE
    ):
        return True

    # - 'deltainfo.distance' is the distance from the base revision --
    #   bounding it limits the amount of I/O we need to do.
    # - 'deltainfo.compresseddeltalen' is the sum of the total size of
    #   deltas we need to apply -- bounding it limits the amount of CPU
    #   we consume.

    textlen = revinfo.textlen
    defaultmax = textlen * 4
    maxdist = revlog._maxdeltachainspan
    if not maxdist:
        maxdist = deltainfo.distance  # ensure the conditional pass
    maxdist = max(maxdist, defaultmax)

    # Bad delta from read span:
    #
    #   If the span of data read is larger than the maximum allowed.
    #
    #   In the sparse-revlog case, we rely on the associated "sparse reading"
    #   to avoid issue related to the span of data. In theory, it would be
    #   possible to build pathological revlog where delta pattern would lead
    #   to too many reads. However, they do not happen in practice at all. So
    #   we skip the span check entirely.
    if not revlog._sparserevlog and maxdist < deltainfo.distance:
        return False

    # Bad delta from new delta size:
    #
    #   If the delta size is larger than the target text, storing the
    #   delta will be inefficient.
    if textlen < deltainfo.deltalen:
        return False

    # Bad delta from cumulated payload size:
    #
    #   If the sum of delta get larger than K * target text length.
    if textlen * LIMIT_DELTA2TEXT < deltainfo.compresseddeltalen:
        return False

    # Bad delta from chain length:
    #
    #   If the number of delta in the chain gets too high.
    if revlog._maxchainlen and revlog._maxchainlen < deltainfo.chainlen:
        return False

    # bad delta from intermediate snapshot size limit
    #
    #   If an intermediate snapshot size is higher than the limit.  The
    #   limit exist to prevent endless chain of intermediate delta to be
    #   created.
    if (
        deltainfo.snapshotdepth is not None
        and (textlen >> deltainfo.snapshotdepth) < deltainfo.deltalen
    ):
        return False

    # bad delta if new intermediate snapshot is larger than the previous
    # snapshot
    if (
        deltainfo.snapshotdepth
        and revlog.length(deltainfo.base) < deltainfo.deltalen
    ):
        return False

    return True
659
659
660
660
# If a revision's full text is that much bigger than a base candidate full
# text's, it is very unlikely that it will produce a valid delta. We no longer
# consider these candidates.
LIMIT_BASE2TEXT = 500
665
665
666
666
def _candidategroups(
    revlog,
    textlen,
    p1,
    p2,
    cachedelta,
    excluded_bases=None,
    target_rev=None,
    snapshot_cache=None,
):
    """Provides groups of revisions to be tested as delta base

    This top level function focuses on emitting groups with unique and
    worthwhile content. See _raw_candidate_groups for details about the group
    order.

    This is a coroutine-style generator: the caller sends back the revision it
    judged "good" (or None) after testing each yielded group, which drives the
    refinement logic.
    """
    # should we try to build a delta?
    if not (len(revlog) and revlog._storedeltachains):
        yield None
        return

    deltalength = revlog.length
    deltaparent = revlog.deltaparent
    sparse = revlog._sparserevlog
    good = None

    deltas_limit = textlen * LIMIT_DELTA2TEXT
    group_chunk_size = revlog._candidate_group_chunk_size

    tested = {nullrev}
    candidates = _refinedgroups(
        revlog,
        p1,
        p2,
        cachedelta,
        snapshot_cache=snapshot_cache,
    )
    while True:
        temptative = candidates.send(good)
        if temptative is None:
            break
        group = []
        for rev in temptative:
            # skip over empty delta (no need to include them in a chain)
            while revlog._generaldelta and not (
                rev == nullrev or rev in tested or deltalength(rev)
            ):
                tested.add(rev)
                rev = deltaparent(rev)
            # no need to try a delta against nullrev, this will be done as a
            # last resort.
            if rev == nullrev:
                continue
            # filter out revision we tested already
            if rev in tested:
                continue

            if (
                cachedelta is not None
                and rev == cachedelta[0]
                and cachedelta[2] == DELTA_BASE_REUSE_FORCE
            ):
                # instructions are to forcibly consider/use this delta base
                group.append(rev)
                continue

            # a higher authority deemed the base unworthy (e.g. censored)
            if excluded_bases is not None and rev in excluded_bases:
                tested.add(rev)
                continue
            # We are in some recomputation cases and that rev is too high in
            # the revlog
            if target_rev is not None and rev >= target_rev:
                tested.add(rev)
                continue
            # filter out delta base that will never produce good delta
            if deltas_limit < revlog.length(rev):
                tested.add(rev)
                continue
            if sparse and revlog.rawsize(rev) < (textlen // LIMIT_BASE2TEXT):
                tested.add(rev)
                continue
            # no delta for rawtext-changing revs (see "candelta" for why)
            if revlog.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
                tested.add(rev)
                continue

            # If we reach here, we are about to build and test a delta.
            # The delta building process will compute the chaininfo in all
            # case, since that computation is cached, it is fine to access it
            # here too.
            chainlen, chainsize = revlog._chaininfo(rev)
            # if chain will be too long, skip base
            if revlog._maxchainlen and chainlen >= revlog._maxchainlen:
                tested.add(rev)
                continue
            # if chain already have too much data, skip base
            if deltas_limit < chainsize:
                tested.add(rev)
                continue
            if sparse and revlog.upperboundcomp is not None:
                maxcomp = revlog.upperboundcomp
                basenotsnap = (p1, p2, nullrev)
                if rev not in basenotsnap and revlog.issnapshot(rev):
                    snapshotdepth = revlog.snapshotdepth(rev)
                    # If text is significantly larger than the base, we can
                    # expect the resulting delta to be proportional to the size
                    # difference
                    revsize = revlog.rawsize(rev)
                    rawsizedistance = max(textlen - revsize, 0)
                    # use an estimate of the compression upper bound.
                    lowestrealisticdeltalen = rawsizedistance // maxcomp

                    # check the absolute constraint on the delta size
                    snapshotlimit = textlen >> snapshotdepth
                    if snapshotlimit < lowestrealisticdeltalen:
                        # delta lower bound is larger than accepted upper bound
                        tested.add(rev)
                        continue

                    # check the relative constraint on the delta size
                    revlength = revlog.length(rev)
                    if revlength < lowestrealisticdeltalen:
                        # delta probable lower bound is larger than target base
                        tested.add(rev)
                        continue

            group.append(rev)
        if group:
            # When the size of the candidate group is big, it can result in a
            # quite significant performance impact. To reduce this, we can send
            # them in smaller batches until the new batch does not provide any
            # improvements.
            #
            # This might reduce the overall efficiency of the compression in
            # some corner cases, but that should also prevent very pathological
            # cases from being an issue. (eg. 20 000 candidates).
            #
            # XXX note that the ordering of the group becomes important as it
            # now impacts the final result. The current order is unprocessed
            # and can be improved.
            if group_chunk_size == 0:
                tested.update(group)
                good = yield tuple(group)
            else:
                prev_good = good
                for start in range(0, len(group), group_chunk_size):
                    sub_group = group[start : start + group_chunk_size]
                    tested.update(sub_group)
                    good = yield tuple(sub_group)
                    # stop chunking as soon as a batch brings no improvement
                    if prev_good == good:
                        break

    yield None
820
820
821
821
def _refinedgroups(revlog, p1, p2, cachedelta, snapshot_cache=None):
    """Generator of candidate-base groups, refined by caller feedback.

    Yields tuples of candidate base revisions; the caller sends back the
    revision it accepted (or None). For sparse revlogs, an accepted snapshot
    base is further refined down (toward its delta parents) and up (toward
    its snapshot children).
    """
    good = None
    # First we try to reuse the delta contained in the bundle.
    # (or from the source revlog)
    #
    # This logic only applies to general delta repositories and can be disabled
    # through configuration. Disabling reuse source delta is useful when
    # we want to make sure we recomputed "optimal" deltas.
    debug_info = None
    if cachedelta is not None and cachedelta[2] > DELTA_BASE_REUSE_NO:
        # Assume what we received from the server is a good choice
        # build delta will reuse the cache
        if debug_info is not None:
            debug_info['cached-delta.tested'] += 1
        good = yield (cachedelta[0],)
        if good is not None:
            if debug_info is not None:
                debug_info['cached-delta.accepted'] += 1
            yield None
            return
    if snapshot_cache is None:
        snapshot_cache = SnapshotCache()
    groups = _rawgroups(
        revlog,
        p1,
        p2,
        cachedelta,
        snapshot_cache,
    )
    for candidates in groups:
        good = yield candidates
        if good is not None:
            break

    # If sparse revlog is enabled, we can try to refine the available deltas
    if not revlog._sparserevlog:
        yield None
        return

    # if we have a refinable value, try to refine it
    if good is not None and good not in (p1, p2) and revlog.issnapshot(good):
        # refine snapshot down
        previous = None
        while previous != good:
            previous = good
            base = revlog.deltaparent(good)
            if base == nullrev:
                break
            good = yield (base,)
        # refine snapshot up
        if not snapshot_cache.snapshots:
            snapshot_cache.update(revlog, good + 1)
        previous = None
        while good != previous:
            previous = good
            children = tuple(sorted(c for c in snapshot_cache.snapshots[good]))
            good = yield children

    if debug_info is not None:
        if good is None:
            debug_info['no-solution'] += 1

    yield None
885
885
886
886
def _rawgroups(revlog, p1, p2, cachedelta, snapshot_cache=None):
    """Provides groups of revisions to be tested as delta base

    This lower level function focuses on emitting deltas theoretically
    interesting, without looking at any practical details.

    The group order aims at providing fast or small candidates first.
    """
    gdelta = revlog._generaldelta
    # gate sparse behind general-delta because of issue6056
    sparse = gdelta and revlog._sparserevlog
    curr = len(revlog)
    prev = curr - 1
    deltachain = lambda rev: revlog._deltachain(rev)[0]

    if gdelta:
        # exclude already lazy tested base if any
        parents = [p for p in (p1, p2) if p != nullrev]

        if not revlog._deltabothparents and len(parents) == 2:
            parents.sort()
            # To minimize the chance of having to build a fulltext,
            # pick first whichever parent is closest to us (max rev)
            yield (parents[1],)
            # then the other one (min rev) if the first did not fit
            yield (parents[0],)
        elif len(parents) > 0:
            # Test all parents (1 or 2), and keep the best candidate
            yield parents

    if sparse and parents:
        if snapshot_cache is None:
            # map: base-rev: [snapshot-revs]
            snapshot_cache = SnapshotCache()
        # See if we can use an existing snapshot in the parent chains to use as
        # a base for a new intermediate-snapshot
        #
        # search for snapshot in parents delta chain
        # map: snapshot-level: snapshot-rev
        parents_snaps = collections.defaultdict(set)
        candidate_chains = [deltachain(p) for p in parents]
        for chain in candidate_chains:
            for idx, s in enumerate(chain):
                if not revlog.issnapshot(s):
                    break
                parents_snaps[idx].add(s)
        snapfloor = min(parents_snaps[0]) + 1
        snapshot_cache.update(revlog, snapfloor)
        # search for the highest "unrelated" revision
        #
        # Adding snapshots used by "unrelated" revision increase the odd we
        # reuse an independant, yet better snapshot chain.
        #
        # XXX instead of building a set of revisions, we could lazily enumerate
        # over the chains. That would be more efficient, however we stick to
        # simple code for now.
        all_revs = set()
        for chain in candidate_chains:
            all_revs.update(chain)
        other = None
        for r in revlog.revs(prev, snapfloor):
            if r not in all_revs:
                other = r
                break
        if other is not None:
            # To avoid unfair competition, we won't use unrelated intermediate
            # snapshot that are deeper than the ones from the parent delta
            # chain.
            max_depth = max(parents_snaps.keys())
            chain = deltachain(other)
            for depth, s in enumerate(chain):
                if s < snapfloor:
                    continue
                if max_depth < depth:
                    break
                if not revlog.issnapshot(s):
                    break
                parents_snaps[depth].add(s)
        # Test them as possible intermediate snapshot base
        # We test them from highest to lowest level. High level one are more
        # likely to result in small delta
        floor = None
        for idx, snaps in sorted(parents_snaps.items(), reverse=True):
            siblings = set()
            for s in snaps:
                siblings.update(snapshot_cache.snapshots[s])
            # Before considering making a new intermediate snapshot, we check
            # if an existing snapshot, children of base we consider, would be
            # suitable.
            #
            # It gives a chance to reuse a delta chain "unrelated" to the
            # current revision instead of starting our own. Without such
            # re-use, topological branches would keep reopening new chains.
            # Creating more and more snapshot as the repository grow.

            if floor is not None:
                # We only do this for siblings created after the one in our
                # parent's delta chain. Those created before has less chances
                # to be valid base since our ancestors had to create a new
                # snapshot.
                siblings = [r for r in siblings if floor < r]
            yield tuple(sorted(siblings))
            # then test the base from our parent's delta chain.
            yield tuple(sorted(snaps))
            floor = min(snaps)
        # No suitable base found in the parent chain, search if any full
        # snapshots emitted since parent's base would be a suitable base for an
        # intermediate snapshot.
        #
        # It gives a chance to reuse a delta chain unrelated to the current
        # revisions instead of starting our own. Without such re-use,
        # topological branches would keep reopening new full chains. Creating
        # more and more snapshot as the repository grow.
        full = [r for r in snapshot_cache.snapshots[nullrev] if snapfloor <= r]
        yield tuple(sorted(full))

    if not sparse:
        # other approach failed try against prev to hopefully save us a
        # fulltext.
        yield (prev,)
1007
1007
1008
1008
1009 class SnapshotCache:
1009 class SnapshotCache:
1010 __slots__ = ('snapshots', '_start_rev', '_end_rev')
1010 __slots__ = ('snapshots', '_start_rev', '_end_rev')
1011
1011
1012 def __init__(self):
1012 def __init__(self):
1013 self.snapshots = collections.defaultdict(set)
1013 self.snapshots = collections.defaultdict(set)
1014 self._start_rev = None
1014 self._start_rev = None
1015 self._end_rev = None
1015 self._end_rev = None
1016
1016
1017 def update(self, revlog, start_rev=0):
1017 def update(self, revlog, start_rev=0):
1018 """find snapshots from start_rev to tip"""
1018 """find snapshots from start_rev to tip"""
1019 nb_revs = len(revlog)
1019 nb_revs = len(revlog)
1020 end_rev = nb_revs - 1
1020 end_rev = nb_revs - 1
1021 if start_rev > end_rev:
1021 if start_rev > end_rev:
1022 return # range is empty
1022 return # range is empty
1023
1023
1024 if self._start_rev is None:
1024 if self._start_rev is None:
1025 assert self._end_rev is None
1025 assert self._end_rev is None
1026 self._update(revlog, start_rev, end_rev)
1026 self._update(revlog, start_rev, end_rev)
1027 elif not (self._start_rev <= start_rev and end_rev <= self._end_rev):
1027 elif not (self._start_rev <= start_rev and end_rev <= self._end_rev):
1028 if start_rev < self._start_rev:
1028 if start_rev < self._start_rev:
1029 self._update(revlog, start_rev, self._start_rev - 1)
1029 self._update(revlog, start_rev, self._start_rev - 1)
1030 if self._end_rev < end_rev:
1030 if self._end_rev < end_rev:
1031 self._update(revlog, self._end_rev + 1, end_rev)
1031 self._update(revlog, self._end_rev + 1, end_rev)
1032
1032
1033 if self._start_rev is None:
1033 if self._start_rev is None:
1034 assert self._end_rev is None
1034 assert self._end_rev is None
1035 self._end_rev = end_rev
1035 self._end_rev = end_rev
1036 self._start_rev = start_rev
1036 self._start_rev = start_rev
1037 else:
1037 else:
1038 self._start_rev = min(self._start_rev, start_rev)
1038 self._start_rev = min(self._start_rev, start_rev)
1039 self._end_rev = max(self._end_rev, end_rev)
1039 self._end_rev = max(self._end_rev, end_rev)
1040 assert self._start_rev <= self._end_rev, (
1040 assert self._start_rev <= self._end_rev, (
1041 self._start_rev,
1041 self._start_rev,
1042 self._end_rev,
1042 self._end_rev,
1043 )
1043 )
1044
1044
1045 def _update(self, revlog, start_rev, end_rev):
1045 def _update(self, revlog, start_rev, end_rev):
1046 """internal method that actually do update content"""
1046 """internal method that actually do update content"""
1047 assert self._start_rev is None or (
1047 assert self._start_rev is None or (
1048 start_rev < self._start_rev or start_rev > self._end_rev
1048 start_rev < self._start_rev or start_rev > self._end_rev
1049 ), (self._start_rev, self._end_rev, start_rev, end_rev)
1049 ), (self._start_rev, self._end_rev, start_rev, end_rev)
1050 assert self._start_rev is None or (
1050 assert self._start_rev is None or (
1051 end_rev < self._start_rev or end_rev > self._end_rev
1051 end_rev < self._start_rev or end_rev > self._end_rev
1052 ), (self._start_rev, self._end_rev, start_rev, end_rev)
1052 ), (self._start_rev, self._end_rev, start_rev, end_rev)
1053 cache = self.snapshots
1053 cache = self.snapshots
1054 if util.safehasattr(revlog.index, b'findsnapshots'):
1054 if util.safehasattr(revlog.index, b'findsnapshots'):
1055 revlog.index.findsnapshots(cache, start_rev, end_rev)
1055 revlog.index.findsnapshots(cache, start_rev, end_rev)
1056 else:
1056 else:
1057 deltaparent = revlog.deltaparent
1057 deltaparent = revlog.deltaparent
1058 issnapshot = revlog.issnapshot
1058 issnapshot = revlog.issnapshot
1059 for rev in revlog.revs(start_rev, end_rev):
1059 for rev in revlog.revs(start_rev, end_rev):
1060 if issnapshot(rev):
1060 if issnapshot(rev):
1061 cache[deltaparent(rev)].add(rev)
1061 cache[deltaparent(rev)].add(rev)
1062
1062
1063
1063
1064 class deltacomputer:
1064 class deltacomputer:
1065 def __init__(
1065 def __init__(
1066 self,
1066 self,
1067 revlog,
1067 revlog,
1068 write_debug=None,
1068 write_debug=None,
1069 debug_search=False,
1069 debug_search=False,
1070 debug_info=None,
1070 debug_info=None,
1071 ):
1071 ):
1072 self.revlog = revlog
1072 self.revlog = revlog
1073 self._write_debug = write_debug
1073 self._write_debug = write_debug
1074 self._debug_search = debug_search
1074 self._debug_search = debug_search
1075 self._debug_info = debug_info
1075 self._debug_info = debug_info
1076 self._snapshot_cache = SnapshotCache()
1076 self._snapshot_cache = SnapshotCache()
1077
1077
1078 def buildtext(self, revinfo, fh):
1078 def buildtext(self, revinfo, fh):
1079 """Builds a fulltext version of a revision
1079 """Builds a fulltext version of a revision
1080
1080
1081 revinfo: revisioninfo instance that contains all needed info
1081 revinfo: revisioninfo instance that contains all needed info
1082 fh: file handle to either the .i or the .d revlog file,
1082 fh: file handle to either the .i or the .d revlog file,
1083 depending on whether it is inlined or not
1083 depending on whether it is inlined or not
1084 """
1084 """
1085 btext = revinfo.btext
1085 btext = revinfo.btext
1086 if btext[0] is not None:
1086 if btext[0] is not None:
1087 return btext[0]
1087 return btext[0]
1088
1088
1089 revlog = self.revlog
1089 revlog = self.revlog
1090 cachedelta = revinfo.cachedelta
1090 cachedelta = revinfo.cachedelta
1091 baserev = cachedelta[0]
1091 baserev = cachedelta[0]
1092 delta = cachedelta[1]
1092 delta = cachedelta[1]
1093
1093
1094 fulltext = btext[0] = _textfromdelta(
1094 fulltext = btext[0] = _textfromdelta(
1095 fh,
1095 fh,
1096 revlog,
1096 revlog,
1097 baserev,
1097 baserev,
1098 delta,
1098 delta,
1099 revinfo.p1,
1099 revinfo.p1,
1100 revinfo.p2,
1100 revinfo.p2,
1101 revinfo.flags,
1101 revinfo.flags,
1102 revinfo.node,
1102 revinfo.node,
1103 )
1103 )
1104 return fulltext
1104 return fulltext
1105
1105
1106 def _builddeltadiff(self, base, revinfo, fh):
1106 def _builddeltadiff(self, base, revinfo, fh):
1107 revlog = self.revlog
1107 revlog = self.revlog
1108 t = self.buildtext(revinfo, fh)
1108 t = self.buildtext(revinfo, fh)
1109 if revlog.iscensored(base):
1109 if revlog.iscensored(base):
1110 # deltas based on a censored revision must replace the
1110 # deltas based on a censored revision must replace the
1111 # full content in one patch, so delta works everywhere
1111 # full content in one patch, so delta works everywhere
1112 header = mdiff.replacediffheader(revlog.rawsize(base), len(t))
1112 header = mdiff.replacediffheader(revlog.rawsize(base), len(t))
1113 delta = header + t
1113 delta = header + t
1114 else:
1114 else:
1115 ptext = revlog.rawdata(base, _df=fh)
1115 ptext = revlog.rawdata(base, _df=fh)
1116 delta = mdiff.textdiff(ptext, t)
1116 delta = mdiff.textdiff(ptext, t)
1117
1117
1118 return delta
1118 return delta
1119
1119
1120 def _builddeltainfo(self, revinfo, base, fh):
1120 def _builddeltainfo(self, revinfo, base, fh):
1121 # can we use the cached delta?
1121 # can we use the cached delta?
1122 revlog = self.revlog
1122 revlog = self.revlog
1123 debug_search = self._write_debug is not None and self._debug_search
1123 debug_search = self._write_debug is not None and self._debug_search
1124 chainbase = revlog.chainbase(base)
1124 chainbase = revlog.chainbase(base)
1125 if revlog._generaldelta:
1125 if revlog._generaldelta:
1126 deltabase = base
1126 deltabase = base
1127 else:
1127 else:
1128 deltabase = chainbase
1128 deltabase = chainbase
1129 snapshotdepth = None
1129 snapshotdepth = None
1130 if revlog._sparserevlog and deltabase == nullrev:
1130 if revlog._sparserevlog and deltabase == nullrev:
1131 snapshotdepth = 0
1131 snapshotdepth = 0
1132 elif revlog._sparserevlog and revlog.issnapshot(deltabase):
1132 elif revlog._sparserevlog and revlog.issnapshot(deltabase):
1133 # A delta chain should always be one full snapshot,
1133 # A delta chain should always be one full snapshot,
1134 # zero or more semi-snapshots, and zero or more deltas
1134 # zero or more semi-snapshots, and zero or more deltas
1135 p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
1135 p1, p2 = revlog.rev(revinfo.p1), revlog.rev(revinfo.p2)
1136 if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
1136 if deltabase not in (p1, p2) and revlog.issnapshot(deltabase):
1137 snapshotdepth = len(revlog._deltachain(deltabase)[0])
1137 snapshotdepth = len(revlog._deltachain(deltabase)[0])
1138 delta = None
1138 delta = None
1139 if revinfo.cachedelta:
1139 if revinfo.cachedelta:
1140 cachebase = revinfo.cachedelta[0]
1140 cachebase = revinfo.cachedelta[0]
1141 # check if the diff still apply
1141 # check if the diff still apply
1142 currentbase = cachebase
1142 currentbase = cachebase
1143 while (
1143 while (
1144 currentbase != nullrev
1144 currentbase != nullrev
1145 and currentbase != base
1145 and currentbase != base
1146 and self.revlog.length(currentbase) == 0
1146 and self.revlog.length(currentbase) == 0
1147 ):
1147 ):
1148 currentbase = self.revlog.deltaparent(currentbase)
1148 currentbase = self.revlog.deltaparent(currentbase)
1149 if self.revlog._lazydelta and currentbase == base:
1149 if self.revlog._lazydelta and currentbase == base:
1150 delta = revinfo.cachedelta[1]
1150 delta = revinfo.cachedelta[1]
1151 if delta is None:
1151 if delta is None:
1152 delta = self._builddeltadiff(base, revinfo, fh)
1152 delta = self._builddeltadiff(base, revinfo, fh)
1153 if debug_search:
1153 if debug_search:
1154 msg = b"DBG-DELTAS-SEARCH: uncompressed-delta-size=%d\n"
1154 msg = b"DBG-DELTAS-SEARCH: uncompressed-delta-size=%d\n"
1155 msg %= len(delta)
1155 msg %= len(delta)
1156 self._write_debug(msg)
1156 self._write_debug(msg)
1157 # snapshotdept need to be neither None nor 0 level snapshot
1157 # snapshotdept need to be neither None nor 0 level snapshot
1158 if revlog.upperboundcomp is not None and snapshotdepth:
1158 if revlog.upperboundcomp is not None and snapshotdepth:
1159 lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
1159 lowestrealisticdeltalen = len(delta) // revlog.upperboundcomp
1160 snapshotlimit = revinfo.textlen >> snapshotdepth
1160 snapshotlimit = revinfo.textlen >> snapshotdepth
1161 if debug_search:
1161 if debug_search:
1162 msg = b"DBG-DELTAS-SEARCH: projected-lower-size=%d\n"
1162 msg = b"DBG-DELTAS-SEARCH: projected-lower-size=%d\n"
1163 msg %= lowestrealisticdeltalen
1163 msg %= lowestrealisticdeltalen
1164 self._write_debug(msg)
1164 self._write_debug(msg)
1165 if snapshotlimit < lowestrealisticdeltalen:
1165 if snapshotlimit < lowestrealisticdeltalen:
1166 if debug_search:
1166 if debug_search:
1167 msg = b"DBG-DELTAS-SEARCH: DISCARDED (snapshot limit)\n"
1167 msg = b"DBG-DELTAS-SEARCH: DISCARDED (snapshot limit)\n"
1168 self._write_debug(msg)
1168 self._write_debug(msg)
1169 return None
1169 return None
1170 if revlog.length(base) < lowestrealisticdeltalen:
1170 if revlog.length(base) < lowestrealisticdeltalen:
1171 if debug_search:
1171 if debug_search:
1172 msg = b"DBG-DELTAS-SEARCH: DISCARDED (prev size)\n"
1172 msg = b"DBG-DELTAS-SEARCH: DISCARDED (prev size)\n"
1173 self._write_debug(msg)
1173 self._write_debug(msg)
1174 return None
1174 return None
1175 header, data = revlog.compress(delta)
1175 header, data = revlog.compress(delta)
1176 deltalen = len(header) + len(data)
1176 deltalen = len(header) + len(data)
1177 offset = revlog.end(len(revlog) - 1)
1177 offset = revlog.end(len(revlog) - 1)
1178 dist = deltalen + offset - revlog.start(chainbase)
1178 dist = deltalen + offset - revlog.start(chainbase)
1179 chainlen, compresseddeltalen = revlog._chaininfo(base)
1179 chainlen, compresseddeltalen = revlog._chaininfo(base)
1180 chainlen += 1
1180 chainlen += 1
1181 compresseddeltalen += deltalen
1181 compresseddeltalen += deltalen
1182
1182
1183 return _deltainfo(
1183 return _deltainfo(
1184 dist,
1184 dist,
1185 deltalen,
1185 deltalen,
1186 (header, data),
1186 (header, data),
1187 deltabase,
1187 deltabase,
1188 chainbase,
1188 chainbase,
1189 chainlen,
1189 chainlen,
1190 compresseddeltalen,
1190 compresseddeltalen,
1191 snapshotdepth,
1191 snapshotdepth,
1192 )
1192 )
1193
1193
1194 def _fullsnapshotinfo(self, fh, revinfo, curr):
1194 def _fullsnapshotinfo(self, fh, revinfo, curr):
1195 rawtext = self.buildtext(revinfo, fh)
1195 rawtext = self.buildtext(revinfo, fh)
1196 data = self.revlog.compress(rawtext)
1196 data = self.revlog.compress(rawtext)
1197 compresseddeltalen = deltalen = dist = len(data[1]) + len(data[0])
1197 compresseddeltalen = deltalen = dist = len(data[1]) + len(data[0])
1198 deltabase = chainbase = curr
1198 deltabase = chainbase = curr
1199 snapshotdepth = 0
1199 snapshotdepth = 0
1200 chainlen = 1
1200 chainlen = 1
1201
1201
1202 return _deltainfo(
1202 return _deltainfo(
1203 dist,
1203 dist,
1204 deltalen,
1204 deltalen,
1205 data,
1205 data,
1206 deltabase,
1206 deltabase,
1207 chainbase,
1207 chainbase,
1208 chainlen,
1208 chainlen,
1209 compresseddeltalen,
1209 compresseddeltalen,
1210 snapshotdepth,
1210 snapshotdepth,
1211 )
1211 )
1212
1212
1213 def finddeltainfo(self, revinfo, fh, excluded_bases=None, target_rev=None):
1213 def finddeltainfo(self, revinfo, fh, excluded_bases=None, target_rev=None):
1214 """Find an acceptable delta against a candidate revision
1214 """Find an acceptable delta against a candidate revision
1215
1215
1216 revinfo: information about the revision (instance of _revisioninfo)
1216 revinfo: information about the revision (instance of _revisioninfo)
1217 fh: file handle to either the .i or the .d revlog file,
1217 fh: file handle to either the .i or the .d revlog file,
1218 depending on whether it is inlined or not
1218 depending on whether it is inlined or not
1219
1219
1220 Returns the first acceptable candidate revision, as ordered by
1220 Returns the first acceptable candidate revision, as ordered by
1221 _candidategroups
1221 _candidategroups
1222
1222
1223 If no suitable deltabase is found, we return delta info for a full
1223 If no suitable deltabase is found, we return delta info for a full
1224 snapshot.
1224 snapshot.
1225
1225
1226 `excluded_bases` is an optional set of revision that cannot be used as
1226 `excluded_bases` is an optional set of revision that cannot be used as
1227 a delta base. Use this to recompute delta suitable in censor or strip
1227 a delta base. Use this to recompute delta suitable in censor or strip
1228 context.
1228 context.
1229 """
1229 """
1230 if target_rev is None:
1230 if target_rev is None:
1231 target_rev = len(self.revlog)
1231 target_rev = len(self.revlog)
1232
1232
1233 if not revinfo.textlen:
1233 if not revinfo.textlen:
1234 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1234 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1235
1235
1236 if excluded_bases is None:
1236 if excluded_bases is None:
1237 excluded_bases = set()
1237 excluded_bases = set()
1238
1238
1239 # no delta for flag processor revision (see "candelta" for why)
1239 # no delta for flag processor revision (see "candelta" for why)
1240 # not calling candelta since only one revision needs test, also to
1240 # not calling candelta since only one revision needs test, also to
1241 # avoid overhead fetching flags again.
1241 # avoid overhead fetching flags again.
1242 if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
1242 if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
1243 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1243 return self._fullsnapshotinfo(fh, revinfo, target_rev)
1244
1244
1245 gather_debug = (
1245 gather_debug = (
1246 self._write_debug is not None or self._debug_info is not None
1246 self._write_debug is not None or self._debug_info is not None
1247 )
1247 )
1248 debug_search = self._write_debug is not None and self._debug_search
1248 debug_search = self._write_debug is not None and self._debug_search
1249
1249
1250 if gather_debug:
1250 if gather_debug:
1251 start = util.timer()
1251 start = util.timer()
1252
1252
1253 # count the number of different delta we tried (for debug purpose)
1253 # count the number of different delta we tried (for debug purpose)
1254 dbg_try_count = 0
1254 dbg_try_count = 0
1255 # count the number of "search round" we did. (for debug purpose)
1255 # count the number of "search round" we did. (for debug purpose)
1256 dbg_try_rounds = 0
1256 dbg_try_rounds = 0
1257 dbg_type = b'unknown'
1257 dbg_type = b'unknown'
1258
1258
1259 cachedelta = revinfo.cachedelta
1259 cachedelta = revinfo.cachedelta
1260 p1 = revinfo.p1
1260 p1 = revinfo.p1
1261 p2 = revinfo.p2
1261 p2 = revinfo.p2
1262 revlog = self.revlog
1262 revlog = self.revlog
1263
1263
1264 deltainfo = None
1264 deltainfo = None
1265 p1r, p2r = revlog.rev(p1), revlog.rev(p2)
1265 p1r, p2r = revlog.rev(p1), revlog.rev(p2)
1266
1266
1267 if gather_debug:
1267 if gather_debug:
1268 if p1r != nullrev:
1268 if p1r != nullrev:
1269 p1_chain_len = revlog._chaininfo(p1r)[0]
1269 p1_chain_len = revlog._chaininfo(p1r)[0]
1270 else:
1270 else:
1271 p1_chain_len = -1
1271 p1_chain_len = -1
1272 if p2r != nullrev:
1272 if p2r != nullrev:
1273 p2_chain_len = revlog._chaininfo(p2r)[0]
1273 p2_chain_len = revlog._chaininfo(p2r)[0]
1274 else:
1274 else:
1275 p2_chain_len = -1
1275 p2_chain_len = -1
1276 if debug_search:
1276 if debug_search:
1277 msg = b"DBG-DELTAS-SEARCH: SEARCH rev=%d\n"
1277 msg = b"DBG-DELTAS-SEARCH: SEARCH rev=%d\n"
1278 msg %= target_rev
1278 msg %= target_rev
1279 self._write_debug(msg)
1279 self._write_debug(msg)
1280
1280
1281 groups = _candidategroups(
1281 groups = _candidategroups(
1282 self.revlog,
1282 self.revlog,
1283 revinfo.textlen,
1283 revinfo.textlen,
1284 p1r,
1284 p1r,
1285 p2r,
1285 p2r,
1286 cachedelta,
1286 cachedelta,
1287 excluded_bases,
1287 excluded_bases,
1288 target_rev,
1288 target_rev,
1289 snapshot_cache=self._snapshot_cache,
1289 snapshot_cache=self._snapshot_cache,
1290 )
1290 )
1291 candidaterevs = next(groups)
1291 candidaterevs = next(groups)
1292 while candidaterevs is not None:
1292 while candidaterevs is not None:
1293 dbg_try_rounds += 1
1293 dbg_try_rounds += 1
1294 if debug_search:
1294 if debug_search:
1295 prev = None
1295 prev = None
1296 if deltainfo is not None:
1296 if deltainfo is not None:
1297 prev = deltainfo.base
1297 prev = deltainfo.base
1298
1298
1299 if (
1299 if (
1300 cachedelta is not None
1300 cachedelta is not None
1301 and len(candidaterevs) == 1
1301 and len(candidaterevs) == 1
1302 and cachedelta[0] in candidaterevs
1302 and cachedelta[0] in candidaterevs
1303 ):
1303 ):
1304 round_type = b"cached-delta"
1304 round_type = b"cached-delta"
1305 elif p1 in candidaterevs or p2 in candidaterevs:
1305 elif p1 in candidaterevs or p2 in candidaterevs:
1306 round_type = b"parents"
1306 round_type = b"parents"
1307 elif prev is not None and all(c < prev for c in candidaterevs):
1307 elif prev is not None and all(c < prev for c in candidaterevs):
1308 round_type = b"refine-down"
1308 round_type = b"refine-down"
1309 elif prev is not None and all(c > prev for c in candidaterevs):
1309 elif prev is not None and all(c > prev for c in candidaterevs):
1310 round_type = b"refine-up"
1310 round_type = b"refine-up"
1311 else:
1311 else:
1312 round_type = b"search-down"
1312 round_type = b"search-down"
1313 msg = b"DBG-DELTAS-SEARCH: ROUND #%d - %d candidates - %s\n"
1313 msg = b"DBG-DELTAS-SEARCH: ROUND #%d - %d candidates - %s\n"
1314 msg %= (dbg_try_rounds, len(candidaterevs), round_type)
1314 msg %= (dbg_try_rounds, len(candidaterevs), round_type)
1315 self._write_debug(msg)
1315 self._write_debug(msg)
1316 nominateddeltas = []
1316 nominateddeltas = []
1317 if deltainfo is not None:
1317 if deltainfo is not None:
1318 if debug_search:
1318 if debug_search:
1319 msg = (
1319 msg = (
1320 b"DBG-DELTAS-SEARCH: CONTENDER: rev=%d - length=%d\n"
1320 b"DBG-DELTAS-SEARCH: CONTENDER: rev=%d - length=%d\n"
1321 )
1321 )
1322 msg %= (deltainfo.base, deltainfo.deltalen)
1322 msg %= (deltainfo.base, deltainfo.deltalen)
1323 self._write_debug(msg)
1323 self._write_debug(msg)
1324 # if we already found a good delta,
1324 # if we already found a good delta,
1325 # challenge it against refined candidates
1325 # challenge it against refined candidates
1326 nominateddeltas.append(deltainfo)
1326 nominateddeltas.append(deltainfo)
1327 for candidaterev in candidaterevs:
1327 for candidaterev in candidaterevs:
1328 if debug_search:
1328 if debug_search:
1329 msg = b"DBG-DELTAS-SEARCH: CANDIDATE: rev=%d\n"
1329 msg = b"DBG-DELTAS-SEARCH: CANDIDATE: rev=%d\n"
1330 msg %= candidaterev
1330 msg %= candidaterev
1331 self._write_debug(msg)
1331 self._write_debug(msg)
1332 candidate_type = None
1332 candidate_type = None
1333 if candidaterev == p1:
1333 if candidaterev == p1:
1334 candidate_type = b"p1"
1334 candidate_type = b"p1"
1335 elif candidaterev == p2:
1335 elif candidaterev == p2:
1336 candidate_type = b"p2"
1336 candidate_type = b"p2"
1337 elif self.revlog.issnapshot(candidaterev):
1337 elif self.revlog.issnapshot(candidaterev):
1338 candidate_type = b"snapshot-%d"
1338 candidate_type = b"snapshot-%d"
1339 candidate_type %= self.revlog.snapshotdepth(
1339 candidate_type %= self.revlog.snapshotdepth(
1340 candidaterev
1340 candidaterev
1341 )
1341 )
1342
1342
1343 if candidate_type is not None:
1343 if candidate_type is not None:
1344 msg = b"DBG-DELTAS-SEARCH: type=%s\n"
1344 msg = b"DBG-DELTAS-SEARCH: type=%s\n"
1345 msg %= candidate_type
1345 msg %= candidate_type
1346 self._write_debug(msg)
1346 self._write_debug(msg)
1347 msg = b"DBG-DELTAS-SEARCH: size=%d\n"
1347 msg = b"DBG-DELTAS-SEARCH: size=%d\n"
1348 msg %= self.revlog.length(candidaterev)
1348 msg %= self.revlog.length(candidaterev)
1349 self._write_debug(msg)
1349 self._write_debug(msg)
1350 msg = b"DBG-DELTAS-SEARCH: base=%d\n"
1350 msg = b"DBG-DELTAS-SEARCH: base=%d\n"
1351 msg %= self.revlog.deltaparent(candidaterev)
1351 msg %= self.revlog.deltaparent(candidaterev)
1352 self._write_debug(msg)
1352 self._write_debug(msg)
1353
1353
1354 dbg_try_count += 1
1354 dbg_try_count += 1
1355
1355
1356 if debug_search:
1356 if debug_search:
1357 delta_start = util.timer()
1357 delta_start = util.timer()
1358 candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
1358 candidatedelta = self._builddeltainfo(revinfo, candidaterev, fh)
1359 if debug_search:
1359 if debug_search:
1360 delta_end = util.timer()
1360 delta_end = util.timer()
1361 msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
1361 msg = b"DBG-DELTAS-SEARCH: delta-search-time=%f\n"
1362 msg %= delta_end - delta_start
1362 msg %= delta_end - delta_start
1363 self._write_debug(msg)
1363 self._write_debug(msg)
1364 if candidatedelta is not None:
1364 if candidatedelta is not None:
1365 if is_good_delta_info(self.revlog, candidatedelta, revinfo):
1365 if is_good_delta_info(self.revlog, candidatedelta, revinfo):
1366 if debug_search:
1366 if debug_search:
1367 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
1367 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (GOOD)\n"
1368 msg %= candidatedelta.deltalen
1368 msg %= candidatedelta.deltalen
1369 self._write_debug(msg)
1369 self._write_debug(msg)
1370 nominateddeltas.append(candidatedelta)
1370 nominateddeltas.append(candidatedelta)
1371 elif debug_search:
1371 elif debug_search:
1372 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (BAD)\n"
1372 msg = b"DBG-DELTAS-SEARCH: DELTA: length=%d (BAD)\n"
1373 msg %= candidatedelta.deltalen
1373 msg %= candidatedelta.deltalen
1374 self._write_debug(msg)
1374 self._write_debug(msg)
1375 elif debug_search:
1375 elif debug_search:
1376 msg = b"DBG-DELTAS-SEARCH: NO-DELTA\n"
1376 msg = b"DBG-DELTAS-SEARCH: NO-DELTA\n"
1377 self._write_debug(msg)
1377 self._write_debug(msg)
1378 if nominateddeltas:
1378 if nominateddeltas:
1379 deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
1379 deltainfo = min(nominateddeltas, key=lambda x: x.deltalen)
1380 if deltainfo is not None:
1380 if deltainfo is not None:
1381 candidaterevs = groups.send(deltainfo.base)
1381 candidaterevs = groups.send(deltainfo.base)
1382 else:
1382 else:
1383 candidaterevs = next(groups)
1383 candidaterevs = next(groups)
1384
1384
1385 if deltainfo is None:
1385 if deltainfo is None:
1386 dbg_type = b"full"
1386 dbg_type = b"full"
1387 deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
1387 deltainfo = self._fullsnapshotinfo(fh, revinfo, target_rev)
1388 elif deltainfo.snapshotdepth: # pytype: disable=attribute-error
1388 elif deltainfo.snapshotdepth: # pytype: disable=attribute-error
1389 dbg_type = b"snapshot"
1389 dbg_type = b"snapshot"
1390 else:
1390 else:
1391 dbg_type = b"delta"
1391 dbg_type = b"delta"
1392
1392
1393 if gather_debug:
1393 if gather_debug:
1394 end = util.timer()
1394 end = util.timer()
1395 if dbg_type == b'full':
1396 used_cached = (
1397 cachedelta is not None
1398 and dbg_try_rounds == 0
1399 and dbg_try_count == 0
1400 and cachedelta[0] == nullrev
1401 )
1402 else:
1395 used_cached = (
1403 used_cached = (
1396 cachedelta is not None
1404 cachedelta is not None
1397 and dbg_try_rounds == 1
1405 and dbg_try_rounds == 1
1398 and dbg_try_count == 1
1406 and dbg_try_count == 1
1399 and deltainfo.base == cachedelta[0]
1407 and deltainfo.base == cachedelta[0]
1400 )
1408 )
1401 dbg = {
1409 dbg = {
1402 'duration': end - start,
1410 'duration': end - start,
1403 'revision': target_rev,
1411 'revision': target_rev,
1404 'delta-base': deltainfo.base, # pytype: disable=attribute-error
1412 'delta-base': deltainfo.base, # pytype: disable=attribute-error
1405 'search_round_count': dbg_try_rounds,
1413 'search_round_count': dbg_try_rounds,
1406 'using-cached-base': used_cached,
1414 'using-cached-base': used_cached,
1407 'delta_try_count': dbg_try_count,
1415 'delta_try_count': dbg_try_count,
1408 'type': dbg_type,
1416 'type': dbg_type,
1409 'p1-chain-len': p1_chain_len,
1417 'p1-chain-len': p1_chain_len,
1410 'p2-chain-len': p2_chain_len,
1418 'p2-chain-len': p2_chain_len,
1411 }
1419 }
1412 if (
1420 if (
1413 deltainfo.snapshotdepth # pytype: disable=attribute-error
1421 deltainfo.snapshotdepth # pytype: disable=attribute-error
1414 is not None
1422 is not None
1415 ):
1423 ):
1416 dbg[
1424 dbg[
1417 'snapshot-depth'
1425 'snapshot-depth'
1418 ] = deltainfo.snapshotdepth # pytype: disable=attribute-error
1426 ] = deltainfo.snapshotdepth # pytype: disable=attribute-error
1419 else:
1427 else:
1420 dbg['snapshot-depth'] = 0
1428 dbg['snapshot-depth'] = 0
1421 target_revlog = b"UNKNOWN"
1429 target_revlog = b"UNKNOWN"
1422 target_type = self.revlog.target[0]
1430 target_type = self.revlog.target[0]
1423 target_key = self.revlog.target[1]
1431 target_key = self.revlog.target[1]
1424 if target_type == KIND_CHANGELOG:
1432 if target_type == KIND_CHANGELOG:
1425 target_revlog = b'CHANGELOG:'
1433 target_revlog = b'CHANGELOG:'
1426 elif target_type == KIND_MANIFESTLOG:
1434 elif target_type == KIND_MANIFESTLOG:
1427 target_revlog = b'MANIFESTLOG:'
1435 target_revlog = b'MANIFESTLOG:'
1428 if target_key:
1436 if target_key:
1429 target_revlog += b'%s:' % target_key
1437 target_revlog += b'%s:' % target_key
1430 elif target_type == KIND_FILELOG:
1438 elif target_type == KIND_FILELOG:
1431 target_revlog = b'FILELOG:'
1439 target_revlog = b'FILELOG:'
1432 if target_key:
1440 if target_key:
1433 target_revlog += b'%s:' % target_key
1441 target_revlog += b'%s:' % target_key
1434 dbg['target-revlog'] = target_revlog
1442 dbg['target-revlog'] = target_revlog
1435
1443
1436 if self._debug_info is not None:
1444 if self._debug_info is not None:
1437 self._debug_info.append(dbg)
1445 self._debug_info.append(dbg)
1438
1446
1439 if self._write_debug is not None:
1447 if self._write_debug is not None:
1440 msg = (
1448 msg = (
1441 b"DBG-DELTAS:"
1449 b"DBG-DELTAS:"
1442 b" %-12s"
1450 b" %-12s"
1443 b" rev=%d:"
1451 b" rev=%d:"
1444 b" delta-base=%d"
1452 b" delta-base=%d"
1445 b" is-cached=%d"
1453 b" is-cached=%d"
1446 b" - search-rounds=%d"
1454 b" - search-rounds=%d"
1447 b" try-count=%d"
1455 b" try-count=%d"
1448 b" - delta-type=%-6s"
1456 b" - delta-type=%-6s"
1449 b" snap-depth=%d"
1457 b" snap-depth=%d"
1450 b" - p1-chain-length=%d"
1458 b" - p1-chain-length=%d"
1451 b" p2-chain-length=%d"
1459 b" p2-chain-length=%d"
1452 b" - duration=%f"
1460 b" - duration=%f"
1453 b"\n"
1461 b"\n"
1454 )
1462 )
1455 msg %= (
1463 msg %= (
1456 dbg["target-revlog"],
1464 dbg["target-revlog"],
1457 dbg["revision"],
1465 dbg["revision"],
1458 dbg["delta-base"],
1466 dbg["delta-base"],
1459 dbg["using-cached-base"],
1467 dbg["using-cached-base"],
1460 dbg["search_round_count"],
1468 dbg["search_round_count"],
1461 dbg["delta_try_count"],
1469 dbg["delta_try_count"],
1462 dbg["type"],
1470 dbg["type"],
1463 dbg["snapshot-depth"],
1471 dbg["snapshot-depth"],
1464 dbg["p1-chain-len"],
1472 dbg["p1-chain-len"],
1465 dbg["p2-chain-len"],
1473 dbg["p2-chain-len"],
1466 dbg["duration"],
1474 dbg["duration"],
1467 )
1475 )
1468 self._write_debug(msg)
1476 self._write_debug(msg)
1469 return deltainfo
1477 return deltainfo
1470
1478
1471
1479
1472 def delta_compression(default_compression_header, deltainfo):
1480 def delta_compression(default_compression_header, deltainfo):
1473 """return (COMPRESSION_MODE, deltainfo)
1481 """return (COMPRESSION_MODE, deltainfo)
1474
1482
1475 used by revlog v2+ format to dispatch between PLAIN and DEFAULT
1483 used by revlog v2+ format to dispatch between PLAIN and DEFAULT
1476 compression.
1484 compression.
1477 """
1485 """
1478 h, d = deltainfo.data
1486 h, d = deltainfo.data
1479 compression_mode = COMP_MODE_INLINE
1487 compression_mode = COMP_MODE_INLINE
1480 if not h and not d:
1488 if not h and not d:
1481 # not data to store at all... declare them uncompressed
1489 # not data to store at all... declare them uncompressed
1482 compression_mode = COMP_MODE_PLAIN
1490 compression_mode = COMP_MODE_PLAIN
1483 elif not h:
1491 elif not h:
1484 t = d[0:1]
1492 t = d[0:1]
1485 if t == b'\0':
1493 if t == b'\0':
1486 compression_mode = COMP_MODE_PLAIN
1494 compression_mode = COMP_MODE_PLAIN
1487 elif t == default_compression_header:
1495 elif t == default_compression_header:
1488 compression_mode = COMP_MODE_DEFAULT
1496 compression_mode = COMP_MODE_DEFAULT
1489 elif h == b'u':
1497 elif h == b'u':
1490 # we have a more efficient way to declare uncompressed
1498 # we have a more efficient way to declare uncompressed
1491 h = b''
1499 h = b''
1492 compression_mode = COMP_MODE_PLAIN
1500 compression_mode = COMP_MODE_PLAIN
1493 deltainfo = drop_u_compression(deltainfo)
1501 deltainfo = drop_u_compression(deltainfo)
1494 return compression_mode, deltainfo
1502 return compression_mode, deltainfo
@@ -1,1127 +1,1127 b''
1 Setting up test
1 Setting up test
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo 0 > afile
5 $ echo 0 > afile
6 $ hg add afile
6 $ hg add afile
7 $ hg commit -m "0.0"
7 $ hg commit -m "0.0"
8 $ echo 1 >> afile
8 $ echo 1 >> afile
9 $ hg commit -m "0.1"
9 $ hg commit -m "0.1"
10 $ echo 2 >> afile
10 $ echo 2 >> afile
11 $ hg commit -m "0.2"
11 $ hg commit -m "0.2"
12 $ echo 3 >> afile
12 $ echo 3 >> afile
13 $ hg commit -m "0.3"
13 $ hg commit -m "0.3"
14 $ hg update -C 0
14 $ hg update -C 0
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
16 $ echo 1 >> afile
16 $ echo 1 >> afile
17 $ hg commit -m "1.1"
17 $ hg commit -m "1.1"
18 created new head
18 created new head
19 $ echo 2 >> afile
19 $ echo 2 >> afile
20 $ hg commit -m "1.2"
20 $ hg commit -m "1.2"
21 $ echo "a line" > fred
21 $ echo "a line" > fred
22 $ echo 3 >> afile
22 $ echo 3 >> afile
23 $ hg add fred
23 $ hg add fred
24 $ hg commit -m "1.3"
24 $ hg commit -m "1.3"
25 $ hg mv afile adifferentfile
25 $ hg mv afile adifferentfile
26 $ hg commit -m "1.3m"
26 $ hg commit -m "1.3m"
27 $ hg update -C 3
27 $ hg update -C 3
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
28 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
29 $ hg mv afile anotherfile
29 $ hg mv afile anotherfile
30 $ hg commit -m "0.3m"
30 $ hg commit -m "0.3m"
31 $ hg verify
31 $ hg verify
32 checking changesets
32 checking changesets
33 checking manifests
33 checking manifests
34 crosschecking files in changesets and manifests
34 crosschecking files in changesets and manifests
35 checking files
35 checking files
36 checked 9 changesets with 7 changes to 4 files
36 checked 9 changesets with 7 changes to 4 files
37 $ cd ..
37 $ cd ..
38 $ hg init empty
38 $ hg init empty
39
39
40 Bundle and phase
40 Bundle and phase
41
41
42 $ hg -R test phase --force --secret 0
42 $ hg -R test phase --force --secret 0
43 $ hg -R test bundle phase.hg empty
43 $ hg -R test bundle phase.hg empty
44 searching for changes
44 searching for changes
45 no changes found (ignored 9 secret changesets)
45 no changes found (ignored 9 secret changesets)
46 [1]
46 [1]
47 $ hg -R test phase --draft -r 'head()'
47 $ hg -R test phase --draft -r 'head()'
48
48
49 Bundle --all
49 Bundle --all
50
50
51 $ hg -R test bundle --all all.hg
51 $ hg -R test bundle --all all.hg
52 9 changesets found
52 9 changesets found
53
53
54 Bundle test to full.hg
54 Bundle test to full.hg
55
55
56 $ hg -R test bundle full.hg empty
56 $ hg -R test bundle full.hg empty
57 searching for changes
57 searching for changes
58 9 changesets found
58 9 changesets found
59
59
60 Unbundle full.hg in test
60 Unbundle full.hg in test
61
61
62 $ hg -R test unbundle full.hg
62 $ hg -R test unbundle full.hg
63 adding changesets
63 adding changesets
64 adding manifests
64 adding manifests
65 adding file changes
65 adding file changes
66 added 0 changesets with 0 changes to 4 files
66 added 0 changesets with 0 changes to 4 files
67 (run 'hg update' to get a working copy)
67 (run 'hg update' to get a working copy)
68
68
69 Verify empty
69 Verify empty
70
70
71 $ hg -R empty heads
71 $ hg -R empty heads
72 [1]
72 [1]
73 $ hg -R empty verify
73 $ hg -R empty verify
74 checking changesets
74 checking changesets
75 checking manifests
75 checking manifests
76 crosschecking files in changesets and manifests
76 crosschecking files in changesets and manifests
77 checking files
77 checking files
78 checked 0 changesets with 0 changes to 0 files
78 checked 0 changesets with 0 changes to 0 files
79
79
80 #if repobundlerepo
80 #if repobundlerepo
81
81
82 Pull full.hg into test (using --cwd)
82 Pull full.hg into test (using --cwd)
83
83
84 $ hg --cwd test pull ../full.hg
84 $ hg --cwd test pull ../full.hg
85 pulling from ../full.hg
85 pulling from ../full.hg
86 searching for changes
86 searching for changes
87 no changes found
87 no changes found
88
88
89 Verify that there are no leaked temporary files after pull (issue2797)
89 Verify that there are no leaked temporary files after pull (issue2797)
90
90
91 $ ls test/.hg | grep .hg10un
91 $ ls test/.hg | grep .hg10un
92 [1]
92 [1]
93
93
94 Pull full.hg into empty (using --cwd)
94 Pull full.hg into empty (using --cwd)
95
95
96 $ hg --cwd empty pull ../full.hg
96 $ hg --cwd empty pull ../full.hg
97 pulling from ../full.hg
97 pulling from ../full.hg
98 requesting all changes
98 requesting all changes
99 adding changesets
99 adding changesets
100 adding manifests
100 adding manifests
101 adding file changes
101 adding file changes
102 added 9 changesets with 7 changes to 4 files (+1 heads)
102 added 9 changesets with 7 changes to 4 files (+1 heads)
103 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
103 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
104 (run 'hg heads' to see heads, 'hg merge' to merge)
104 (run 'hg heads' to see heads, 'hg merge' to merge)
105
105
106 Rollback empty
106 Rollback empty
107
107
108 $ hg -R empty rollback
108 $ hg -R empty rollback
109 repository tip rolled back to revision -1 (undo pull)
109 repository tip rolled back to revision -1 (undo pull)
110
110
111 Pull full.hg into empty again (using --cwd)
111 Pull full.hg into empty again (using --cwd)
112
112
113 $ hg --cwd empty pull ../full.hg
113 $ hg --cwd empty pull ../full.hg
114 pulling from ../full.hg
114 pulling from ../full.hg
115 requesting all changes
115 requesting all changes
116 adding changesets
116 adding changesets
117 adding manifests
117 adding manifests
118 adding file changes
118 adding file changes
119 added 9 changesets with 7 changes to 4 files (+1 heads)
119 added 9 changesets with 7 changes to 4 files (+1 heads)
120 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
120 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
121 (run 'hg heads' to see heads, 'hg merge' to merge)
121 (run 'hg heads' to see heads, 'hg merge' to merge)
122
122
123 Pull full.hg into test (using -R)
123 Pull full.hg into test (using -R)
124
124
125 $ hg -R test pull full.hg
125 $ hg -R test pull full.hg
126 pulling from full.hg
126 pulling from full.hg
127 searching for changes
127 searching for changes
128 no changes found
128 no changes found
129
129
130 Pull full.hg into empty (using -R)
130 Pull full.hg into empty (using -R)
131
131
132 $ hg -R empty pull full.hg
132 $ hg -R empty pull full.hg
133 pulling from full.hg
133 pulling from full.hg
134 searching for changes
134 searching for changes
135 no changes found
135 no changes found
136
136
137 Rollback empty
137 Rollback empty
138
138
139 $ hg -R empty rollback
139 $ hg -R empty rollback
140 repository tip rolled back to revision -1 (undo pull)
140 repository tip rolled back to revision -1 (undo pull)
141
141
142 Pull full.hg into empty again (using -R)
142 Pull full.hg into empty again (using -R)
143
143
144 $ hg -R empty pull full.hg
144 $ hg -R empty pull full.hg
145 pulling from full.hg
145 pulling from full.hg
146 requesting all changes
146 requesting all changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 9 changesets with 7 changes to 4 files (+1 heads)
150 added 9 changesets with 7 changes to 4 files (+1 heads)
151 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
151 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
152 (run 'hg heads' to see heads, 'hg merge' to merge)
152 (run 'hg heads' to see heads, 'hg merge' to merge)
153
153
154 Log -R full.hg in fresh empty
154 Log -R full.hg in fresh empty
155
155
156 $ rm -r empty
156 $ rm -r empty
157 $ hg init empty
157 $ hg init empty
158 $ cd empty
158 $ cd empty
159 $ hg -R bundle://../full.hg log
159 $ hg -R bundle://../full.hg log
160 changeset: 8:aa35859c02ea
160 changeset: 8:aa35859c02ea
161 tag: tip
161 tag: tip
162 parent: 3:eebf5a27f8ca
162 parent: 3:eebf5a27f8ca
163 user: test
163 user: test
164 date: Thu Jan 01 00:00:00 1970 +0000
164 date: Thu Jan 01 00:00:00 1970 +0000
165 summary: 0.3m
165 summary: 0.3m
166
166
167 changeset: 7:a6a34bfa0076
167 changeset: 7:a6a34bfa0076
168 user: test
168 user: test
169 date: Thu Jan 01 00:00:00 1970 +0000
169 date: Thu Jan 01 00:00:00 1970 +0000
170 summary: 1.3m
170 summary: 1.3m
171
171
172 changeset: 6:7373c1169842
172 changeset: 6:7373c1169842
173 user: test
173 user: test
174 date: Thu Jan 01 00:00:00 1970 +0000
174 date: Thu Jan 01 00:00:00 1970 +0000
175 summary: 1.3
175 summary: 1.3
176
176
177 changeset: 5:1bb50a9436a7
177 changeset: 5:1bb50a9436a7
178 user: test
178 user: test
179 date: Thu Jan 01 00:00:00 1970 +0000
179 date: Thu Jan 01 00:00:00 1970 +0000
180 summary: 1.2
180 summary: 1.2
181
181
182 changeset: 4:095197eb4973
182 changeset: 4:095197eb4973
183 parent: 0:f9ee2f85a263
183 parent: 0:f9ee2f85a263
184 user: test
184 user: test
185 date: Thu Jan 01 00:00:00 1970 +0000
185 date: Thu Jan 01 00:00:00 1970 +0000
186 summary: 1.1
186 summary: 1.1
187
187
188 changeset: 3:eebf5a27f8ca
188 changeset: 3:eebf5a27f8ca
189 user: test
189 user: test
190 date: Thu Jan 01 00:00:00 1970 +0000
190 date: Thu Jan 01 00:00:00 1970 +0000
191 summary: 0.3
191 summary: 0.3
192
192
193 changeset: 2:e38ba6f5b7e0
193 changeset: 2:e38ba6f5b7e0
194 user: test
194 user: test
195 date: Thu Jan 01 00:00:00 1970 +0000
195 date: Thu Jan 01 00:00:00 1970 +0000
196 summary: 0.2
196 summary: 0.2
197
197
198 changeset: 1:34c2bf6b0626
198 changeset: 1:34c2bf6b0626
199 user: test
199 user: test
200 date: Thu Jan 01 00:00:00 1970 +0000
200 date: Thu Jan 01 00:00:00 1970 +0000
201 summary: 0.1
201 summary: 0.1
202
202
203 changeset: 0:f9ee2f85a263
203 changeset: 0:f9ee2f85a263
204 user: test
204 user: test
205 date: Thu Jan 01 00:00:00 1970 +0000
205 date: Thu Jan 01 00:00:00 1970 +0000
206 summary: 0.0
206 summary: 0.0
207
207
208 Make sure bundlerepo doesn't leak tempfiles (issue2491)
208 Make sure bundlerepo doesn't leak tempfiles (issue2491)
209
209
210 $ ls .hg
210 $ ls .hg
211 00changelog.i
211 00changelog.i
212 cache
212 cache
213 requires
213 requires
214 store
214 store
215 wcache
215 wcache
216
216
217 Pull ../full.hg into empty (with hook)
217 Pull ../full.hg into empty (with hook)
218
218
219 $ cat >> .hg/hgrc <<EOF
219 $ cat >> .hg/hgrc <<EOF
220 > [hooks]
220 > [hooks]
221 > changegroup = sh -c "printenv.py --line changegroup"
221 > changegroup = sh -c "printenv.py --line changegroup"
222 > EOF
222 > EOF
223
223
224 doesn't work (yet ?)
224 doesn't work (yet ?)
225 NOTE: msys is mangling the URL below
225 NOTE: msys is mangling the URL below
226
226
227 hg -R bundle://../full.hg verify
227 hg -R bundle://../full.hg verify
228
228
229 $ hg pull bundle://../full.hg
229 $ hg pull bundle://../full.hg
230 pulling from bundle:../full.hg
230 pulling from bundle:../full.hg
231 requesting all changes
231 requesting all changes
232 adding changesets
232 adding changesets
233 adding manifests
233 adding manifests
234 adding file changes
234 adding file changes
235 added 9 changesets with 7 changes to 4 files (+1 heads)
235 added 9 changesets with 7 changes to 4 files (+1 heads)
236 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
236 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
237 changegroup hook: HG_HOOKNAME=changegroup
237 changegroup hook: HG_HOOKNAME=changegroup
238 HG_HOOKTYPE=changegroup
238 HG_HOOKTYPE=changegroup
239 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
239 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
240 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
240 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
241 HG_SOURCE=pull
241 HG_SOURCE=pull
242 HG_TXNID=TXN:$ID$
242 HG_TXNID=TXN:$ID$
243 HG_TXNNAME=pull
243 HG_TXNNAME=pull
244 bundle:../full.hg (no-msys !)
244 bundle:../full.hg (no-msys !)
245 bundle;../full.hg (msys !)
245 bundle;../full.hg (msys !)
246 HG_URL=bundle:../full.hg (no-msys !)
246 HG_URL=bundle:../full.hg (no-msys !)
247 HG_URL=bundle;../full.hg (msys !)
247 HG_URL=bundle;../full.hg (msys !)
248
248
249 (run 'hg heads' to see heads, 'hg merge' to merge)
249 (run 'hg heads' to see heads, 'hg merge' to merge)
250
250
251 Rollback empty
251 Rollback empty
252
252
253 $ hg rollback
253 $ hg rollback
254 repository tip rolled back to revision -1 (undo pull)
254 repository tip rolled back to revision -1 (undo pull)
255 $ cd ..
255 $ cd ..
256
256
257 Log -R bundle:empty+full.hg
257 Log -R bundle:empty+full.hg
258
258
259 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
259 $ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
260 8 7 6 5 4 3 2 1 0
260 8 7 6 5 4 3 2 1 0
261
261
262 Pull full.hg into empty again (using -R; with hook)
262 Pull full.hg into empty again (using -R; with hook)
263
263
264 $ hg -R empty pull full.hg
264 $ hg -R empty pull full.hg
265 pulling from full.hg
265 pulling from full.hg
266 requesting all changes
266 requesting all changes
267 adding changesets
267 adding changesets
268 adding manifests
268 adding manifests
269 adding file changes
269 adding file changes
270 added 9 changesets with 7 changes to 4 files (+1 heads)
270 added 9 changesets with 7 changes to 4 files (+1 heads)
271 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
271 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
272 changegroup hook: HG_HOOKNAME=changegroup
272 changegroup hook: HG_HOOKNAME=changegroup
273 HG_HOOKTYPE=changegroup
273 HG_HOOKTYPE=changegroup
274 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
274 HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735
275 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
275 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf
276 HG_SOURCE=pull
276 HG_SOURCE=pull
277 HG_TXNID=TXN:$ID$
277 HG_TXNID=TXN:$ID$
278 HG_TXNNAME=pull
278 HG_TXNNAME=pull
279 bundle:empty+full.hg
279 bundle:empty+full.hg
280 HG_URL=bundle:empty+full.hg
280 HG_URL=bundle:empty+full.hg
281
281
282 (run 'hg heads' to see heads, 'hg merge' to merge)
282 (run 'hg heads' to see heads, 'hg merge' to merge)
283
283
284 #endif
284 #endif
285
285
286 Cannot produce streaming clone bundles with "hg bundle"
286 Cannot produce streaming clone bundles with "hg bundle"
287
287
288 $ hg -R test bundle -t packed1 packed.hg
288 $ hg -R test bundle -t packed1 packed.hg
289 abort: packed bundles cannot be produced by "hg bundle"
289 abort: packed bundles cannot be produced by "hg bundle"
290 (use 'hg debugcreatestreamclonebundle')
290 (use 'hg debugcreatestreamclonebundle')
291 [10]
291 [10]
292
292
293 packed1 is produced properly
293 packed1 is produced properly
294
294
295
295
296 #if reporevlogstore rust
296 #if reporevlogstore rust
297
297
298 $ hg -R test debugcreatestreamclonebundle packed.hg
298 $ hg -R test debugcreatestreamclonebundle packed.hg
299 writing 2665 bytes for 6 files
299 writing 2665 bytes for 6 files
300 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
300 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
301
301
302 $ f -B 64 --size --sha1 --hexdump packed.hg
302 $ f -B 64 --size --sha1 --hexdump packed.hg
303 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
303 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
304 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
304 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
305 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
305 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
306 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
306 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
307 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
307 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
308 $ hg debugbundle --spec packed.hg
308 $ hg debugbundle --spec packed.hg
309 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
309 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
310 #endif
310 #endif
311
311
312 #if reporevlogstore no-rust zstd
312 #if reporevlogstore no-rust zstd
313
313
314 $ hg -R test debugcreatestreamclonebundle packed.hg
314 $ hg -R test debugcreatestreamclonebundle packed.hg
315 writing 2665 bytes for 6 files
315 writing 2665 bytes for 6 files
316 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
316 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
317
317
318 $ f -B 64 --size --sha1 --hexdump packed.hg
318 $ f -B 64 --size --sha1 --hexdump packed.hg
319 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
319 packed.hg: size=2865, sha1=353d10311f4befa195d9a1ca4b8e26518115c702
320 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
320 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
321 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
321 0010: 00 00 00 00 0a 69 00 3b 67 65 6e 65 72 61 6c 64 |.....i.;generald|
322 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
322 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 2d 63 6f 6d 70 |elta,revlog-comp|
323 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
323 0030: 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c 72 65 76 |ression-zstd,rev|
324 $ hg debugbundle --spec packed.hg
324 $ hg debugbundle --spec packed.hg
325 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
325 none-packed1;requirements%3Dgeneraldelta%2Crevlog-compression-zstd%2Crevlogv1%2Csparserevlog
326 #endif
326 #endif
327
327
328 #if reporevlogstore no-rust no-zstd
328 #if reporevlogstore no-rust no-zstd
329
329
330 $ hg -R test debugcreatestreamclonebundle packed.hg
330 $ hg -R test debugcreatestreamclonebundle packed.hg
331 writing 2664 bytes for 6 files
331 writing 2664 bytes for 6 files
332 bundle requirements: generaldelta, revlogv1, sparserevlog
332 bundle requirements: generaldelta, revlogv1, sparserevlog
333
333
334 $ f -B 64 --size --sha1 --hexdump packed.hg
334 $ f -B 64 --size --sha1 --hexdump packed.hg
335 packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5
335 packed.hg: size=2840, sha1=12bf3eee3eb8a04c503ce2d29b48f0135c7edff5
336 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
336 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
337 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
337 0010: 00 00 00 00 0a 68 00 23 67 65 6e 65 72 61 6c 64 |.....h.#generald|
338 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
338 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 2c 73 70 |elta,revlogv1,sp|
339 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
339 0030: 61 72 73 65 72 65 76 6c 6f 67 00 64 61 74 61 2f |arserevlog.data/|
340 $ hg debugbundle --spec packed.hg
340 $ hg debugbundle --spec packed.hg
341 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
341 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Csparserevlog
342 #endif
342 #endif
343
343
344 #if reporevlogstore
344 #if reporevlogstore
345
345
346 generaldelta requirement is not listed in stream clone bundles unless used
346 generaldelta requirement is not listed in stream clone bundles unless used
347
347
348 $ hg --config format.usegeneraldelta=false init testnongd
348 $ hg --config format.usegeneraldelta=false init testnongd
349 $ cd testnongd
349 $ cd testnongd
350 $ touch foo
350 $ touch foo
351 $ hg -q commit -A -m initial
351 $ hg -q commit -A -m initial
352 $ cd ..
352 $ cd ..
353
353
354 #endif
354 #endif
355
355
356 #if reporevlogstore rust
356 #if reporevlogstore rust
357
357
358 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
358 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
359 writing 301 bytes for 3 files
359 writing 301 bytes for 3 files
360 bundle requirements: revlog-compression-zstd, revlogv1
360 bundle requirements: revlog-compression-zstd, revlogv1
361
361
362 $ f -B 64 --size --sha1 --hexdump packednongd.hg
362 $ f -B 64 --size --sha1 --hexdump packednongd.hg
363 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
363 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
364 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
364 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
365 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
365 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
366 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
366 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
367 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
367 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
368
368
369 $ hg debugbundle --spec packednongd.hg
369 $ hg debugbundle --spec packednongd.hg
370 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
370 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
371
371
372 #endif
372 #endif
373
373
374 #if reporevlogstore no-rust zstd
374 #if reporevlogstore no-rust zstd
375
375
376 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
376 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
377 writing 301 bytes for 3 files
377 writing 301 bytes for 3 files
378 bundle requirements: revlog-compression-zstd, revlogv1
378 bundle requirements: revlog-compression-zstd, revlogv1
379
379
380 $ f -B 64 --size --sha1 --hexdump packednongd.hg
380 $ f -B 64 --size --sha1 --hexdump packednongd.hg
381 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
381 packednongd.hg: size=407, sha1=0b8714422b785ba8eb98c916b41ffd5fb994c9b5
382 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
382 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
383 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
383 0010: 00 00 00 00 01 2d 00 21 72 65 76 6c 6f 67 2d 63 |.....-.!revlog-c|
384 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
384 0020: 6f 6d 70 72 65 73 73 69 6f 6e 2d 7a 73 74 64 2c |ompression-zstd,|
385 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
385 0030: 72 65 76 6c 6f 67 76 31 00 64 61 74 61 2f 66 6f |revlogv1.data/fo|
386
386
387 $ hg debugbundle --spec packednongd.hg
387 $ hg debugbundle --spec packednongd.hg
388 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
388 none-packed1;requirements%3Drevlog-compression-zstd%2Crevlogv1
389
389
390
390
391 #endif
391 #endif
392
392
393 #if reporevlogstore no-rust no-zstd
393 #if reporevlogstore no-rust no-zstd
394
394
395 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
395 $ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
396 writing 301 bytes for 3 files
396 writing 301 bytes for 3 files
397 bundle requirements: revlogv1
397 bundle requirements: revlogv1
398
398
399 $ f -B 64 --size --sha1 --hexdump packednongd.hg
399 $ f -B 64 --size --sha1 --hexdump packednongd.hg
400 packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
400 packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
401 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
401 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
402 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
402 0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
403 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
403 0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
404 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
404 0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
405
405
406 $ hg debugbundle --spec packednongd.hg
406 $ hg debugbundle --spec packednongd.hg
407 none-packed1;requirements%3Drevlogv1
407 none-packed1;requirements%3Drevlogv1
408
408
409
409
410 #endif
410 #endif
411
411
412 #if reporevlogstore
412 #if reporevlogstore
413
413
414 Warning emitted when packed bundles contain secret changesets
414 Warning emitted when packed bundles contain secret changesets
415
415
416 $ hg init testsecret
416 $ hg init testsecret
417 $ cd testsecret
417 $ cd testsecret
418 $ touch foo
418 $ touch foo
419 $ hg -q commit -A -m initial
419 $ hg -q commit -A -m initial
420 $ hg phase --force --secret -r .
420 $ hg phase --force --secret -r .
421 $ cd ..
421 $ cd ..
422
422
423 #endif
423 #endif
424
424
425 #if reporevlogstore rust
425 #if reporevlogstore rust
426
426
427 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
427 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
428 (warning: stream clone bundle will contain secret revisions)
428 (warning: stream clone bundle will contain secret revisions)
429 writing 301 bytes for 3 files
429 writing 301 bytes for 3 files
430 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
430 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
431
431
432 #endif
432 #endif
433
433
434 #if reporevlogstore no-rust zstd
434 #if reporevlogstore no-rust zstd
435
435
436 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
436 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
437 (warning: stream clone bundle will contain secret revisions)
437 (warning: stream clone bundle will contain secret revisions)
438 writing 301 bytes for 3 files
438 writing 301 bytes for 3 files
439 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
439 bundle requirements: generaldelta, revlog-compression-zstd, revlogv1, sparserevlog
440
440
441 #endif
441 #endif
442
442
443 #if reporevlogstore no-rust no-zstd
443 #if reporevlogstore no-rust no-zstd
444
444
445 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
445 $ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
446 (warning: stream clone bundle will contain secret revisions)
446 (warning: stream clone bundle will contain secret revisions)
447 writing 301 bytes for 3 files
447 writing 301 bytes for 3 files
448 bundle requirements: generaldelta, revlogv1, sparserevlog
448 bundle requirements: generaldelta, revlogv1, sparserevlog
449
449
450 #endif
450 #endif
451
451
452 #if reporevlogstore
452 #if reporevlogstore
453
453
454 Unpacking packed1 bundles with "hg unbundle" isn't allowed
454 Unpacking packed1 bundles with "hg unbundle" isn't allowed
455
455
456 $ hg init packed
456 $ hg init packed
457 $ hg -R packed unbundle packed.hg
457 $ hg -R packed unbundle packed.hg
458 abort: packed bundles cannot be applied with "hg unbundle"
458 abort: packed bundles cannot be applied with "hg unbundle"
459 (use "hg debugapplystreamclonebundle")
459 (use "hg debugapplystreamclonebundle")
460 [10]
460 [10]
461
461
462 packed1 can be consumed from debug command
462 packed1 can be consumed from debug command
463
463
464 (this also confirms that streamclone-ed changes are visible via
464 (this also confirms that streamclone-ed changes are visible via
465 @filecache properties to in-process procedures before closing
465 @filecache properties to in-process procedures before closing
466 transaction)
466 transaction)
467
467
468 $ cat > $TESTTMP/showtip.py <<EOF
468 $ cat > $TESTTMP/showtip.py <<EOF
469 >
469 >
470 > def showtip(ui, repo, hooktype, **kwargs):
470 > def showtip(ui, repo, hooktype, **kwargs):
471 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
471 > ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
472 >
472 >
473 > def reposetup(ui, repo):
473 > def reposetup(ui, repo):
474 > # this confirms (and ensures) that (empty) 00changelog.i
474 > # this confirms (and ensures) that (empty) 00changelog.i
475 > # before streamclone is already cached as repo.changelog
475 > # before streamclone is already cached as repo.changelog
476 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
476 > ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
477 >
477 >
478 > # this confirms that streamclone-ed changes are visible to
478 > # this confirms that streamclone-ed changes are visible to
479 > # in-process procedures before closing transaction
479 > # in-process procedures before closing transaction
480 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
480 > ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
481 >
481 >
482 > # this confirms that streamclone-ed changes are still visible
482 > # this confirms that streamclone-ed changes are still visible
483 > # after closing transaction
483 > # after closing transaction
484 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
484 > ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
485 > EOF
485 > EOF
486 $ cat >> $HGRCPATH <<EOF
486 $ cat >> $HGRCPATH <<EOF
487 > [extensions]
487 > [extensions]
488 > showtip = $TESTTMP/showtip.py
488 > showtip = $TESTTMP/showtip.py
489 > EOF
489 > EOF
490
490
491 $ hg -R packed debugapplystreamclonebundle packed.hg
491 $ hg -R packed debugapplystreamclonebundle packed.hg
492 6 files to transfer, 2.60 KB of data
492 6 files to transfer, 2.60 KB of data
493 pretxnopen: 000000000000
493 pretxnopen: 000000000000
494 pretxnclose: aa35859c02ea
494 pretxnclose: aa35859c02ea
495 transferred 2.60 KB in * seconds (* */sec) (glob)
495 transferred 2.60 KB in * seconds (* */sec) (glob)
496 txnclose: aa35859c02ea
496 txnclose: aa35859c02ea
497
497
498 (for safety, confirm visibility of streamclone-ed changes by another
498 (for safety, confirm visibility of streamclone-ed changes by another
499 process, too)
499 process, too)
500
500
501 $ hg -R packed tip -T "{node|short}\n"
501 $ hg -R packed tip -T "{node|short}\n"
502 aa35859c02ea
502 aa35859c02ea
503
503
504 $ cat >> $HGRCPATH <<EOF
504 $ cat >> $HGRCPATH <<EOF
505 > [extensions]
505 > [extensions]
506 > showtip = !
506 > showtip = !
507 > EOF
507 > EOF
508
508
509 Does not work on non-empty repo
509 Does not work on non-empty repo
510
510
511 $ hg -R packed debugapplystreamclonebundle packed.hg
511 $ hg -R packed debugapplystreamclonebundle packed.hg
512 abort: cannot apply stream clone bundle on non-empty repo
512 abort: cannot apply stream clone bundle on non-empty repo
513 [255]
513 [255]
514
514
515 #endif
515 #endif
516
516
517 Create partial clones
517 Create partial clones
518
518
519 $ rm -r empty
519 $ rm -r empty
520 $ hg init empty
520 $ hg init empty
521 $ hg clone -r 3 test partial
521 $ hg clone -r 3 test partial
522 adding changesets
522 adding changesets
523 adding manifests
523 adding manifests
524 adding file changes
524 adding file changes
525 added 4 changesets with 4 changes to 1 files
525 added 4 changesets with 4 changes to 1 files
526 new changesets f9ee2f85a263:eebf5a27f8ca
526 new changesets f9ee2f85a263:eebf5a27f8ca
527 updating to branch default
527 updating to branch default
528 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
528 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
529 $ hg clone partial partial2
529 $ hg clone partial partial2
530 updating to branch default
530 updating to branch default
531 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
531 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
532 $ cd partial
532 $ cd partial
533
533
534 #if repobundlerepo
534 #if repobundlerepo
535
535
536 Log -R full.hg in partial
536 Log -R full.hg in partial
537
537
538 $ hg -R bundle://../full.hg log -T phases
538 $ hg -R bundle://../full.hg log -T phases
539 changeset: 8:aa35859c02ea
539 changeset: 8:aa35859c02ea
540 tag: tip
540 tag: tip
541 phase: draft
541 phase: draft
542 parent: 3:eebf5a27f8ca
542 parent: 3:eebf5a27f8ca
543 user: test
543 user: test
544 date: Thu Jan 01 00:00:00 1970 +0000
544 date: Thu Jan 01 00:00:00 1970 +0000
545 summary: 0.3m
545 summary: 0.3m
546
546
547 changeset: 7:a6a34bfa0076
547 changeset: 7:a6a34bfa0076
548 phase: draft
548 phase: draft
549 user: test
549 user: test
550 date: Thu Jan 01 00:00:00 1970 +0000
550 date: Thu Jan 01 00:00:00 1970 +0000
551 summary: 1.3m
551 summary: 1.3m
552
552
553 changeset: 6:7373c1169842
553 changeset: 6:7373c1169842
554 phase: draft
554 phase: draft
555 user: test
555 user: test
556 date: Thu Jan 01 00:00:00 1970 +0000
556 date: Thu Jan 01 00:00:00 1970 +0000
557 summary: 1.3
557 summary: 1.3
558
558
559 changeset: 5:1bb50a9436a7
559 changeset: 5:1bb50a9436a7
560 phase: draft
560 phase: draft
561 user: test
561 user: test
562 date: Thu Jan 01 00:00:00 1970 +0000
562 date: Thu Jan 01 00:00:00 1970 +0000
563 summary: 1.2
563 summary: 1.2
564
564
565 changeset: 4:095197eb4973
565 changeset: 4:095197eb4973
566 phase: draft
566 phase: draft
567 parent: 0:f9ee2f85a263
567 parent: 0:f9ee2f85a263
568 user: test
568 user: test
569 date: Thu Jan 01 00:00:00 1970 +0000
569 date: Thu Jan 01 00:00:00 1970 +0000
570 summary: 1.1
570 summary: 1.1
571
571
572 changeset: 3:eebf5a27f8ca
572 changeset: 3:eebf5a27f8ca
573 phase: public
573 phase: public
574 user: test
574 user: test
575 date: Thu Jan 01 00:00:00 1970 +0000
575 date: Thu Jan 01 00:00:00 1970 +0000
576 summary: 0.3
576 summary: 0.3
577
577
578 changeset: 2:e38ba6f5b7e0
578 changeset: 2:e38ba6f5b7e0
579 phase: public
579 phase: public
580 user: test
580 user: test
581 date: Thu Jan 01 00:00:00 1970 +0000
581 date: Thu Jan 01 00:00:00 1970 +0000
582 summary: 0.2
582 summary: 0.2
583
583
584 changeset: 1:34c2bf6b0626
584 changeset: 1:34c2bf6b0626
585 phase: public
585 phase: public
586 user: test
586 user: test
587 date: Thu Jan 01 00:00:00 1970 +0000
587 date: Thu Jan 01 00:00:00 1970 +0000
588 summary: 0.1
588 summary: 0.1
589
589
590 changeset: 0:f9ee2f85a263
590 changeset: 0:f9ee2f85a263
591 phase: public
591 phase: public
592 user: test
592 user: test
593 date: Thu Jan 01 00:00:00 1970 +0000
593 date: Thu Jan 01 00:00:00 1970 +0000
594 summary: 0.0
594 summary: 0.0
595
595
596
596
597 Incoming full.hg in partial
597 Incoming full.hg in partial
598
598
599 $ hg incoming bundle://../full.hg
599 $ hg incoming bundle://../full.hg
600 comparing with bundle:../full.hg
600 comparing with bundle:../full.hg
601 searching for changes
601 searching for changes
602 changeset: 4:095197eb4973
602 changeset: 4:095197eb4973
603 parent: 0:f9ee2f85a263
603 parent: 0:f9ee2f85a263
604 user: test
604 user: test
605 date: Thu Jan 01 00:00:00 1970 +0000
605 date: Thu Jan 01 00:00:00 1970 +0000
606 summary: 1.1
606 summary: 1.1
607
607
608 changeset: 5:1bb50a9436a7
608 changeset: 5:1bb50a9436a7
609 user: test
609 user: test
610 date: Thu Jan 01 00:00:00 1970 +0000
610 date: Thu Jan 01 00:00:00 1970 +0000
611 summary: 1.2
611 summary: 1.2
612
612
613 changeset: 6:7373c1169842
613 changeset: 6:7373c1169842
614 user: test
614 user: test
615 date: Thu Jan 01 00:00:00 1970 +0000
615 date: Thu Jan 01 00:00:00 1970 +0000
616 summary: 1.3
616 summary: 1.3
617
617
618 changeset: 7:a6a34bfa0076
618 changeset: 7:a6a34bfa0076
619 user: test
619 user: test
620 date: Thu Jan 01 00:00:00 1970 +0000
620 date: Thu Jan 01 00:00:00 1970 +0000
621 summary: 1.3m
621 summary: 1.3m
622
622
623 changeset: 8:aa35859c02ea
623 changeset: 8:aa35859c02ea
624 tag: tip
624 tag: tip
625 parent: 3:eebf5a27f8ca
625 parent: 3:eebf5a27f8ca
626 user: test
626 user: test
627 date: Thu Jan 01 00:00:00 1970 +0000
627 date: Thu Jan 01 00:00:00 1970 +0000
628 summary: 0.3m
628 summary: 0.3m
629
629
630
630
631 Outgoing -R full.hg vs partial2 in partial
631 Outgoing -R full.hg vs partial2 in partial
632
632
633 $ hg -R bundle://../full.hg outgoing ../partial2
633 $ hg -R bundle://../full.hg outgoing ../partial2
634 comparing with ../partial2
634 comparing with ../partial2
635 searching for changes
635 searching for changes
636 changeset: 4:095197eb4973
636 changeset: 4:095197eb4973
637 parent: 0:f9ee2f85a263
637 parent: 0:f9ee2f85a263
638 user: test
638 user: test
639 date: Thu Jan 01 00:00:00 1970 +0000
639 date: Thu Jan 01 00:00:00 1970 +0000
640 summary: 1.1
640 summary: 1.1
641
641
642 changeset: 5:1bb50a9436a7
642 changeset: 5:1bb50a9436a7
643 user: test
643 user: test
644 date: Thu Jan 01 00:00:00 1970 +0000
644 date: Thu Jan 01 00:00:00 1970 +0000
645 summary: 1.2
645 summary: 1.2
646
646
647 changeset: 6:7373c1169842
647 changeset: 6:7373c1169842
648 user: test
648 user: test
649 date: Thu Jan 01 00:00:00 1970 +0000
649 date: Thu Jan 01 00:00:00 1970 +0000
650 summary: 1.3
650 summary: 1.3
651
651
652 changeset: 7:a6a34bfa0076
652 changeset: 7:a6a34bfa0076
653 user: test
653 user: test
654 date: Thu Jan 01 00:00:00 1970 +0000
654 date: Thu Jan 01 00:00:00 1970 +0000
655 summary: 1.3m
655 summary: 1.3m
656
656
657 changeset: 8:aa35859c02ea
657 changeset: 8:aa35859c02ea
658 tag: tip
658 tag: tip
659 parent: 3:eebf5a27f8ca
659 parent: 3:eebf5a27f8ca
660 user: test
660 user: test
661 date: Thu Jan 01 00:00:00 1970 +0000
661 date: Thu Jan 01 00:00:00 1970 +0000
662 summary: 0.3m
662 summary: 0.3m
663
663
664
664
665 Outgoing -R does-not-exist.hg vs partial2 in partial
665 Outgoing -R does-not-exist.hg vs partial2 in partial
666
666
667 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
667 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
668 abort: *../does-not-exist.hg* (glob)
668 abort: *../does-not-exist.hg* (glob)
669 [255]
669 [255]
670
670
671 #endif
671 #endif
672
672
673 $ cd ..
673 $ cd ..
674
674
675 hide outer repo
675 hide outer repo
676 $ hg init
676 $ hg init
677
677
678 Direct clone from bundle (all-history)
678 Direct clone from bundle (all-history)
679
679
680 #if repobundlerepo
680 #if repobundlerepo
681
681
682 $ hg clone full.hg full-clone
682 $ hg clone full.hg full-clone
683 requesting all changes
683 requesting all changes
684 adding changesets
684 adding changesets
685 adding manifests
685 adding manifests
686 adding file changes
686 adding file changes
687 added 9 changesets with 7 changes to 4 files (+1 heads)
687 added 9 changesets with 7 changes to 4 files (+1 heads)
688 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
688 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
689 updating to branch default
689 updating to branch default
690 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
690 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
691 $ hg -R full-clone heads
691 $ hg -R full-clone heads
692 changeset: 8:aa35859c02ea
692 changeset: 8:aa35859c02ea
693 tag: tip
693 tag: tip
694 parent: 3:eebf5a27f8ca
694 parent: 3:eebf5a27f8ca
695 user: test
695 user: test
696 date: Thu Jan 01 00:00:00 1970 +0000
696 date: Thu Jan 01 00:00:00 1970 +0000
697 summary: 0.3m
697 summary: 0.3m
698
698
699 changeset: 7:a6a34bfa0076
699 changeset: 7:a6a34bfa0076
700 user: test
700 user: test
701 date: Thu Jan 01 00:00:00 1970 +0000
701 date: Thu Jan 01 00:00:00 1970 +0000
702 summary: 1.3m
702 summary: 1.3m
703
703
704 $ rm -r full-clone
704 $ rm -r full-clone
705
705
706 When cloning from a non-copiable repository into '', do not
706 When cloning from a non-copiable repository into '', do not
707 recurse infinitely (issue2528)
707 recurse infinitely (issue2528)
708
708
709 $ hg clone full.hg ''
709 $ hg clone full.hg ''
710 abort: empty destination path is not valid
710 abort: empty destination path is not valid
711 [10]
711 [10]
712
712
713 test for https://bz.mercurial-scm.org/216
713 test for https://bz.mercurial-scm.org/216
714
714
715 Unbundle incremental bundles into fresh empty in one go
715 Unbundle incremental bundles into fresh empty in one go
716
716
717 $ rm -r empty
717 $ rm -r empty
718 $ hg init empty
718 $ hg init empty
719 $ hg -R test bundle --base null -r 0 ../0.hg
719 $ hg -R test bundle --base null -r 0 ../0.hg
720 1 changesets found
720 1 changesets found
721 $ hg -R test bundle --exact -r 1 ../1.hg
721 $ hg -R test bundle --exact -r 1 ../1.hg
722 1 changesets found
722 1 changesets found
723 $ hg -R empty unbundle -u ../0.hg ../1.hg
723 $ hg -R empty unbundle -u ../0.hg ../1.hg
724 adding changesets
724 adding changesets
725 adding manifests
725 adding manifests
726 adding file changes
726 adding file changes
727 added 1 changesets with 1 changes to 1 files
727 added 1 changesets with 1 changes to 1 files
728 new changesets f9ee2f85a263 (1 drafts)
728 new changesets f9ee2f85a263 (1 drafts)
729 adding changesets
729 adding changesets
730 adding manifests
730 adding manifests
731 adding file changes
731 adding file changes
732 added 1 changesets with 1 changes to 1 files
732 added 1 changesets with 1 changes to 1 files
733 new changesets 34c2bf6b0626 (1 drafts)
733 new changesets 34c2bf6b0626 (1 drafts)
734 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
734 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
735
735
736 View full contents of the bundle
736 View full contents of the bundle
737 $ hg -R test bundle --base null -r 3 ../partial.hg
737 $ hg -R test bundle --base null -r 3 ../partial.hg
738 4 changesets found
738 4 changesets found
739 $ cd test
739 $ cd test
740 $ hg -R ../../partial.hg log -r "bundle()"
740 $ hg -R ../../partial.hg log -r "bundle()"
741 changeset: 0:f9ee2f85a263
741 changeset: 0:f9ee2f85a263
742 user: test
742 user: test
743 date: Thu Jan 01 00:00:00 1970 +0000
743 date: Thu Jan 01 00:00:00 1970 +0000
744 summary: 0.0
744 summary: 0.0
745
745
746 changeset: 1:34c2bf6b0626
746 changeset: 1:34c2bf6b0626
747 user: test
747 user: test
748 date: Thu Jan 01 00:00:00 1970 +0000
748 date: Thu Jan 01 00:00:00 1970 +0000
749 summary: 0.1
749 summary: 0.1
750
750
751 changeset: 2:e38ba6f5b7e0
751 changeset: 2:e38ba6f5b7e0
752 user: test
752 user: test
753 date: Thu Jan 01 00:00:00 1970 +0000
753 date: Thu Jan 01 00:00:00 1970 +0000
754 summary: 0.2
754 summary: 0.2
755
755
756 changeset: 3:eebf5a27f8ca
756 changeset: 3:eebf5a27f8ca
757 user: test
757 user: test
758 date: Thu Jan 01 00:00:00 1970 +0000
758 date: Thu Jan 01 00:00:00 1970 +0000
759 summary: 0.3
759 summary: 0.3
760
760
761 $ cd ..
761 $ cd ..
762
762
763 #endif
763 #endif
764
764
765 test for 540d1059c802
765 test for 540d1059c802
766
766
767 $ hg init orig
767 $ hg init orig
768 $ cd orig
768 $ cd orig
769 $ echo foo > foo
769 $ echo foo > foo
770 $ hg add foo
770 $ hg add foo
771 $ hg ci -m 'add foo'
771 $ hg ci -m 'add foo'
772
772
773 $ hg clone . ../copy
773 $ hg clone . ../copy
774 updating to branch default
774 updating to branch default
775 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
775 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
776 $ hg tag foo
776 $ hg tag foo
777
777
778 $ cd ../copy
778 $ cd ../copy
779 $ echo >> foo
779 $ echo >> foo
780 $ hg ci -m 'change foo'
780 $ hg ci -m 'change foo'
781 $ hg bundle ../bundle.hg ../orig
781 $ hg bundle ../bundle.hg ../orig
782 searching for changes
782 searching for changes
783 1 changesets found
783 1 changesets found
784
784
785 $ cd ..
785 $ cd ..
786
786
787 #if repobundlerepo
787 #if repobundlerepo
788 $ cd orig
788 $ cd orig
789 $ hg incoming ../bundle.hg
789 $ hg incoming ../bundle.hg
790 comparing with ../bundle.hg
790 comparing with ../bundle.hg
791 searching for changes
791 searching for changes
792 changeset: 2:ed1b79f46b9a
792 changeset: 2:ed1b79f46b9a
793 tag: tip
793 tag: tip
794 parent: 0:bbd179dfa0a7
794 parent: 0:bbd179dfa0a7
795 user: test
795 user: test
796 date: Thu Jan 01 00:00:00 1970 +0000
796 date: Thu Jan 01 00:00:00 1970 +0000
797 summary: change foo
797 summary: change foo
798
798
799 $ cd ..
799 $ cd ..
800
800
801 test bundle with # in the filename (issue2154):
801 test bundle with # in the filename (issue2154):
802
802
803 $ cp bundle.hg 'test#bundle.hg'
803 $ cp bundle.hg 'test#bundle.hg'
804 $ cd orig
804 $ cd orig
805 $ hg incoming '../test#bundle.hg'
805 $ hg incoming '../test#bundle.hg'
806 comparing with ../test
806 comparing with ../test
807 abort: unknown revision 'bundle.hg'
807 abort: unknown revision 'bundle.hg'
808 [10]
808 [10]
809
809
810 note that percent encoding is not handled:
810 note that percent encoding is not handled:
811
811
812 $ hg incoming ../test%23bundle.hg
812 $ hg incoming ../test%23bundle.hg
813 abort: repository ../test%23bundle.hg not found
813 abort: repository ../test%23bundle.hg not found
814 [255]
814 [255]
815 $ cd ..
815 $ cd ..
816
816
817 #endif
817 #endif
818
818
819 test to bundle revisions on the newly created branch (issue3828):
819 test to bundle revisions on the newly created branch (issue3828):
820
820
821 $ hg -q clone -U test test-clone
821 $ hg -q clone -U test test-clone
822 $ cd test
822 $ cd test
823
823
824 $ hg -q branch foo
824 $ hg -q branch foo
825 $ hg commit -m "create foo branch"
825 $ hg commit -m "create foo branch"
826 $ hg -q outgoing ../test-clone
826 $ hg -q outgoing ../test-clone
827 9:b4f5acb1ee27
827 9:b4f5acb1ee27
828 $ hg -q bundle --branch foo foo.hg ../test-clone
828 $ hg -q bundle --branch foo foo.hg ../test-clone
829 #if repobundlerepo
829 #if repobundlerepo
830 $ hg -R foo.hg -q log -r "bundle()"
830 $ hg -R foo.hg -q log -r "bundle()"
831 9:b4f5acb1ee27
831 9:b4f5acb1ee27
832 #endif
832 #endif
833
833
834 $ cd ..
834 $ cd ..
835
835
836 test for https://bz.mercurial-scm.org/1144
836 test for https://bz.mercurial-scm.org/1144
837
837
838 test that verify bundle does not traceback
838 test that verify bundle does not traceback
839
839
840 partial history bundle, fails w/ unknown parent
840 partial history bundle, fails w/ unknown parent
841
841
842 $ hg -R bundle.hg verify
842 $ hg -R bundle.hg verify
843 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
843 abort: 00changelog@bbd179dfa0a71671c253b3ae0aa1513b60d199fa: unknown parent
844 [50]
844 [50]
845
845
846 full history bundle, refuses to verify non-local repo
846 full history bundle, refuses to verify non-local repo
847
847
848 #if repobundlerepo
848 #if repobundlerepo
849 $ hg -R all.hg verify
849 $ hg -R all.hg verify
850 abort: cannot verify bundle or remote repos
850 abort: cannot verify bundle or remote repos
851 [255]
851 [255]
852 #endif
852 #endif
853
853
854 but, regular verify must continue to work
854 but, regular verify must continue to work
855
855
856 $ hg -R orig verify
856 $ hg -R orig verify
857 checking changesets
857 checking changesets
858 checking manifests
858 checking manifests
859 crosschecking files in changesets and manifests
859 crosschecking files in changesets and manifests
860 checking files
860 checking files
861 checked 2 changesets with 2 changes to 2 files
861 checked 2 changesets with 2 changes to 2 files
862
862
863 #if repobundlerepo
863 #if repobundlerepo
864 diff against bundle
864 diff against bundle
865
865
866 $ hg init b
866 $ hg init b
867 $ cd b
867 $ cd b
868 $ hg -R ../all.hg diff -r tip
868 $ hg -R ../all.hg diff -r tip
869 diff -r aa35859c02ea anotherfile
869 diff -r aa35859c02ea anotherfile
870 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
870 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
871 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
871 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
872 @@ -1,4 +0,0 @@
872 @@ -1,4 +0,0 @@
873 -0
873 -0
874 -1
874 -1
875 -2
875 -2
876 -3
876 -3
877 $ cd ..
877 $ cd ..
878 #endif
878 #endif
879
879
880 bundle single branch
880 bundle single branch
881
881
882 $ hg init branchy
882 $ hg init branchy
883 $ cd branchy
883 $ cd branchy
884 $ echo a >a
884 $ echo a >a
885 $ echo x >x
885 $ echo x >x
886 $ hg ci -Ama
886 $ hg ci -Ama
887 adding a
887 adding a
888 adding x
888 adding x
889 $ echo c >c
889 $ echo c >c
890 $ echo xx >x
890 $ echo xx >x
891 $ hg ci -Amc
891 $ hg ci -Amc
892 adding c
892 adding c
893 $ echo c1 >c1
893 $ echo c1 >c1
894 $ hg ci -Amc1
894 $ hg ci -Amc1
895 adding c1
895 adding c1
896 $ hg up 0
896 $ hg up 0
897 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
897 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
898 $ echo b >b
898 $ echo b >b
899 $ hg ci -Amb
899 $ hg ci -Amb
900 adding b
900 adding b
901 created new head
901 created new head
902 $ echo b1 >b1
902 $ echo b1 >b1
903 $ echo xx >x
903 $ echo xx >x
904 $ hg ci -Amb1
904 $ hg ci -Amb1
905 adding b1
905 adding b1
906 $ hg clone -q -r2 . part
906 $ hg clone -q -r2 . part
907
907
908 == bundling via incoming
908 == bundling via incoming
909
909
910 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
910 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
911 comparing with .
911 comparing with .
912 searching for changes
912 searching for changes
913 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
913 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
914 057f4db07f61970e1c11e83be79e9d08adc4dc31
914 057f4db07f61970e1c11e83be79e9d08adc4dc31
915
915
916 == bundling
916 == bundling
917
917
918 $ hg bundle bundle.hg part --debug --config progress.debug=true
918 $ hg bundle bundle.hg part --debug --config progress.debug=true
919 query 1; heads
919 query 1; heads
920 searching for changes
920 searching for changes
921 all remote heads known locally
921 all remote heads known locally
922 2 changesets found
922 2 changesets found
923 list of changesets:
923 list of changesets:
924 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
924 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
925 057f4db07f61970e1c11e83be79e9d08adc4dc31
925 057f4db07f61970e1c11e83be79e9d08adc4dc31
926 bundle2-output-bundle: "HG20", (1 params) 2 parts total
926 bundle2-output-bundle: "HG20", (1 params) 2 parts total
927 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
927 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
928 changesets: 1/2 chunks (50.00%)
928 changesets: 1/2 chunks (50.00%)
929 changesets: 2/2 chunks (100.00%)
929 changesets: 2/2 chunks (100.00%)
930 manifests: 1/2 chunks (50.00%)
930 manifests: 1/2 chunks (50.00%)
931 manifests: 2/2 chunks (100.00%)
931 manifests: 2/2 chunks (100.00%)
932 files: b 1/3 files (33.33%)
932 files: b 1/3 files (33.33%)
933 files: b1 2/3 files (66.67%)
933 files: b1 2/3 files (66.67%)
934 files: x 3/3 files (100.00%)
934 files: x 3/3 files (100.00%)
935 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
935 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
936
936
937 #if repobundlerepo
937 #if repobundlerepo
938 == Test for issue3441
938 == Test for issue3441
939
939
940 $ hg clone -q -r0 . part2
940 $ hg clone -q -r0 . part2
941 $ hg -q -R part2 pull bundle.hg
941 $ hg -q -R part2 pull bundle.hg
942 $ hg -R part2 verify
942 $ hg -R part2 verify
943 checking changesets
943 checking changesets
944 checking manifests
944 checking manifests
945 crosschecking files in changesets and manifests
945 crosschecking files in changesets and manifests
946 checking files
946 checking files
947 checked 3 changesets with 5 changes to 4 files
947 checked 3 changesets with 5 changes to 4 files
948 #endif
948 #endif
949
949
950 == Test bundling no commits
950 == Test bundling no commits
951
951
952 $ hg bundle -r 'public()' no-output.hg
952 $ hg bundle -r 'public()' no-output.hg
953 abort: no commits to bundle
953 abort: no commits to bundle
954 [10]
954 [10]
955
955
956 $ cd ..
956 $ cd ..
957
957
958 When user merges to the revision existing only in the bundle,
958 When user merges to the revision existing only in the bundle,
959 it should show warning that second parent of the working
959 it should show warning that second parent of the working
960 directory does not exist
960 directory does not exist
961
961
962 $ hg init update2bundled
962 $ hg init update2bundled
963 $ cd update2bundled
963 $ cd update2bundled
964 $ cat <<EOF >> .hg/hgrc
964 $ cat <<EOF >> .hg/hgrc
965 > [extensions]
965 > [extensions]
966 > strip =
966 > strip =
967 > EOF
967 > EOF
968 $ echo "aaa" >> a
968 $ echo "aaa" >> a
969 $ hg commit -A -m 0
969 $ hg commit -A -m 0
970 adding a
970 adding a
971 $ echo "bbb" >> b
971 $ echo "bbb" >> b
972 $ hg commit -A -m 1
972 $ hg commit -A -m 1
973 adding b
973 adding b
974 $ echo "ccc" >> c
974 $ echo "ccc" >> c
975 $ hg commit -A -m 2
975 $ hg commit -A -m 2
976 adding c
976 adding c
977 $ hg update -r 1
977 $ hg update -r 1
978 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
978 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
979 $ echo "ddd" >> d
979 $ echo "ddd" >> d
980 $ hg commit -A -m 3
980 $ hg commit -A -m 3
981 adding d
981 adding d
982 created new head
982 created new head
983 $ hg update -r 2
983 $ hg update -r 2
984 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
984 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
985 $ hg log -G
985 $ hg log -G
986 o changeset: 3:8bd3e1f196af
986 o changeset: 3:8bd3e1f196af
987 | tag: tip
987 | tag: tip
988 | parent: 1:a01eca7af26d
988 | parent: 1:a01eca7af26d
989 | user: test
989 | user: test
990 | date: Thu Jan 01 00:00:00 1970 +0000
990 | date: Thu Jan 01 00:00:00 1970 +0000
991 | summary: 3
991 | summary: 3
992 |
992 |
993 | @ changeset: 2:4652c276ac4f
993 | @ changeset: 2:4652c276ac4f
994 |/ user: test
994 |/ user: test
995 | date: Thu Jan 01 00:00:00 1970 +0000
995 | date: Thu Jan 01 00:00:00 1970 +0000
996 | summary: 2
996 | summary: 2
997 |
997 |
998 o changeset: 1:a01eca7af26d
998 o changeset: 1:a01eca7af26d
999 | user: test
999 | user: test
1000 | date: Thu Jan 01 00:00:00 1970 +0000
1000 | date: Thu Jan 01 00:00:00 1970 +0000
1001 | summary: 1
1001 | summary: 1
1002 |
1002 |
1003 o changeset: 0:4fe08cd4693e
1003 o changeset: 0:4fe08cd4693e
1004 user: test
1004 user: test
1005 date: Thu Jan 01 00:00:00 1970 +0000
1005 date: Thu Jan 01 00:00:00 1970 +0000
1006 summary: 0
1006 summary: 0
1007
1007
1008
1008
1009 #if repobundlerepo
1009 #if repobundlerepo
1010 $ hg bundle --base 1 -r 3 ../update2bundled.hg
1010 $ hg bundle --base 1 -r 3 ../update2bundled.hg
1011 1 changesets found
1011 1 changesets found
1012 $ hg strip -r 3
1012 $ hg strip -r 3
1013 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
1013 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
1014 $ hg merge -R ../update2bundled.hg -r 3
1014 $ hg merge -R ../update2bundled.hg -r 3
1015 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1015 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1016 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1017 (branch merge, don't forget to commit)
1017 (branch merge, don't forget to commit)
1018
1018
1019 When user updates to the revision existing only in the bundle,
1019 When user updates to the revision existing only in the bundle,
1020 it should show warning
1020 it should show warning
1021
1021
1022 $ hg update -R ../update2bundled.hg --clean -r 3
1022 $ hg update -R ../update2bundled.hg --clean -r 3
1023 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1023 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
1024 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1024 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1025
1025
1026 When user updates to the revision existing in the local repository
1026 When user updates to the revision existing in the local repository
1027 the warning shouldn't be emitted
1027 the warning shouldn't be emitted
1028
1028
1029 $ hg update -R ../update2bundled.hg -r 0
1029 $ hg update -R ../update2bundled.hg -r 0
1030 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1030 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1031 #endif
1031 #endif
1032
1032
1033 Test the option that create slim bundle
1033 Test the option that create slim bundle
1034
1034
1035 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
1035 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
1036 3 changesets found
1036 3 changesets found
1037
1037
1038 Test the option that create and no-delta's bundle
1038 Test the option that create and no-delta's bundle
1039 $ hg bundle -a --config devel.bundle.delta=full ./full.hg
1039 $ hg bundle -a --config devel.bundle.delta=full ./full.hg
1040 3 changesets found
1040 3 changesets found
1041
1041
1042
1042
1043 Test the debug statistic when building a bundle
1043 Test the debug statistic when building a bundle
1044 -----------------------------------------------
1044 -----------------------------------------------
1045
1045
1046 $ hg bundle -a ./default.hg --config debug.bundling-stats=yes
1046 $ hg bundle -a ./default.hg --config debug.bundling-stats=yes
1047 3 changesets found
1047 3 changesets found
1048 DEBUG-BUNDLING: revisions: 9
1048 DEBUG-BUNDLING: revisions: 9
1049 DEBUG-BUNDLING: changelog: 3
1049 DEBUG-BUNDLING: changelog: 3
1050 DEBUG-BUNDLING: manifest: 3
1050 DEBUG-BUNDLING: manifest: 3
1051 DEBUG-BUNDLING: files: 3 (for 3 revlogs)
1051 DEBUG-BUNDLING: files: 3 (for 3 revlogs)
1052 DEBUG-BUNDLING: deltas:
1052 DEBUG-BUNDLING: deltas:
1053 DEBUG-BUNDLING: from-storage: 2 (100% of available 2)
1053 DEBUG-BUNDLING: from-storage: 2 (100% of available 2)
1054 DEBUG-BUNDLING: computed: 7
1054 DEBUG-BUNDLING: computed: 7
1055 DEBUG-BUNDLING: full: 7 (100% of native 7)
1055 DEBUG-BUNDLING: full: 7 (100% of native 7)
1056 DEBUG-BUNDLING: changelog: 3 (100% of native 3)
1056 DEBUG-BUNDLING: changelog: 3 (100% of native 3)
1057 DEBUG-BUNDLING: manifests: 1 (100% of native 1)
1057 DEBUG-BUNDLING: manifests: 1 (100% of native 1)
1058 DEBUG-BUNDLING: files: 3 (100% of native 3)
1058 DEBUG-BUNDLING: files: 3 (100% of native 3)
1059
1059
1060 Test the debug output when applying delta
1060 Test the debug output when applying delta
1061 -----------------------------------------
1061 -----------------------------------------
1062
1062
1063 $ hg init foo
1063 $ hg init foo
1064 $ hg -R foo unbundle ./slim.hg \
1064 $ hg -R foo unbundle ./slim.hg \
1065 > --config debug.revlog.debug-delta=yes \
1065 > --config debug.revlog.debug-delta=yes \
1066 > --config storage.revlog.reuse-external-delta=no \
1066 > --config storage.revlog.reuse-external-delta=no \
1067 > --config storage.revlog.reuse-external-delta-parent=no
1067 > --config storage.revlog.reuse-external-delta-parent=no
1068 adding changesets
1068 adding changesets
1069 DBG-DELTAS: CHANGELOG: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1069 DBG-DELTAS: CHANGELOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1070 DBG-DELTAS: CHANGELOG: rev=1: delta-base=1 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1070 DBG-DELTAS: CHANGELOG: rev=1: delta-base=1 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1071 DBG-DELTAS: CHANGELOG: rev=2: delta-base=2 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1071 DBG-DELTAS: CHANGELOG: rev=2: delta-base=2 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1072 adding manifests
1072 adding manifests
1073 DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1073 DBG-DELTAS: MANIFESTLOG: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1074 DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1074 DBG-DELTAS: MANIFESTLOG: rev=1: delta-base=0 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=0 p2-chain-length=-1 - duration=* (glob)
1075 DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
1075 DBG-DELTAS: MANIFESTLOG: rev=2: delta-base=1 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=0 - p1-chain-length=1 p2-chain-length=-1 - duration=* (glob)
1076 adding file changes
1076 adding file changes
1077 DBG-DELTAS: FILELOG:a: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1077 DBG-DELTAS: FILELOG:a: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1078 DBG-DELTAS: FILELOG:b: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1078 DBG-DELTAS: FILELOG:b: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1079 DBG-DELTAS: FILELOG:c: rev=0: delta-base=0 is-cached=0 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1079 DBG-DELTAS: FILELOG:c: rev=0: delta-base=0 is-cached=1 - search-rounds=0 try-count=0 - delta-type=full snap-depth=0 - p1-chain-length=-1 p2-chain-length=-1 - duration=* (glob)
1080 added 3 changesets with 3 changes to 3 files
1080 added 3 changesets with 3 changes to 3 files
1081 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1081 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1082 (run 'hg update' to get a working copy)
1082 (run 'hg update' to get a working copy)
1083
1083
1084
1084
1085 Test the debug statistic when applying a bundle
1085 Test the debug statistic when applying a bundle
1086 -----------------------------------------------
1086 -----------------------------------------------
1087
1087
1088 $ hg init bar
1088 $ hg init bar
1089 $ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
1089 $ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
1090 adding changesets
1090 adding changesets
1091 adding manifests
1091 adding manifests
1092 adding file changes
1092 adding file changes
1093 DEBUG-UNBUNDLING: revisions: 9
1093 DEBUG-UNBUNDLING: revisions: 9
1094 DEBUG-UNBUNDLING: changelog: 3 ( 33%)
1094 DEBUG-UNBUNDLING: changelog: 3 ( 33%)
1095 DEBUG-UNBUNDLING: manifests: 3 ( 33%)
1095 DEBUG-UNBUNDLING: manifests: 3 ( 33%)
1096 DEBUG-UNBUNDLING: files: 3 ( 33%)
1096 DEBUG-UNBUNDLING: files: 3 ( 33%)
1097 DEBUG-UNBUNDLING: total-time: ?????????????? seconds (glob)
1097 DEBUG-UNBUNDLING: total-time: ?????????????? seconds (glob)
1098 DEBUG-UNBUNDLING: changelog: ?????????????? seconds (???%) (glob)
1098 DEBUG-UNBUNDLING: changelog: ?????????????? seconds (???%) (glob)
1099 DEBUG-UNBUNDLING: manifests: ?????????????? seconds (???%) (glob)
1099 DEBUG-UNBUNDLING: manifests: ?????????????? seconds (???%) (glob)
1100 DEBUG-UNBUNDLING: files: ?????????????? seconds (???%) (glob)
1100 DEBUG-UNBUNDLING: files: ?????????????? seconds (???%) (glob)
1101 DEBUG-UNBUNDLING: type-count:
1101 DEBUG-UNBUNDLING: type-count:
1102 DEBUG-UNBUNDLING: changelog:
1102 DEBUG-UNBUNDLING: changelog:
1103 DEBUG-UNBUNDLING: full: 3
1103 DEBUG-UNBUNDLING: full: 3
1104 DEBUG-UNBUNDLING: cached: 0 ( 0%)
1104 DEBUG-UNBUNDLING: cached: 3 (100%)
1105 DEBUG-UNBUNDLING: manifests:
1105 DEBUG-UNBUNDLING: manifests:
1106 DEBUG-UNBUNDLING: full: 1
1106 DEBUG-UNBUNDLING: full: 1
1107 DEBUG-UNBUNDLING: cached: 0 ( 0%)
1107 DEBUG-UNBUNDLING: cached: 1 (100%)
1108 DEBUG-UNBUNDLING: delta: 2
1108 DEBUG-UNBUNDLING: delta: 2
1109 DEBUG-UNBUNDLING: cached: 2 (100%)
1109 DEBUG-UNBUNDLING: cached: 2 (100%)
1110 DEBUG-UNBUNDLING: files:
1110 DEBUG-UNBUNDLING: files:
1111 DEBUG-UNBUNDLING: full: 3
1111 DEBUG-UNBUNDLING: full: 3
1112 DEBUG-UNBUNDLING: cached: 0 ( 0%)
1112 DEBUG-UNBUNDLING: cached: 3 (100%)
1113 DEBUG-UNBUNDLING: type-time:
1113 DEBUG-UNBUNDLING: type-time:
1114 DEBUG-UNBUNDLING: changelog:
1114 DEBUG-UNBUNDLING: changelog:
1115 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1115 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1116 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1116 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1117 DEBUG-UNBUNDLING: manifests:
1117 DEBUG-UNBUNDLING: manifests:
1118 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1118 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1119 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1119 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1120 DEBUG-UNBUNDLING: delta: ?????????????? seconds (???% of total) (glob)
1120 DEBUG-UNBUNDLING: delta: ?????????????? seconds (???% of total) (glob)
1121 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1121 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1122 DEBUG-UNBUNDLING: files:
1122 DEBUG-UNBUNDLING: files:
1123 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1123 DEBUG-UNBUNDLING: full: ?????????????? seconds (???% of total) (glob)
1124 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1124 DEBUG-UNBUNDLING: cached: ?????????????? seconds (???% of total) (glob)
1125 added 3 changesets with 3 changes to 3 files
1125 added 3 changesets with 3 changes to 3 files
1126 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1126 new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
1127 (run 'hg update' to get a working copy)
1127 (run 'hg update' to get a working copy)
General Comments 0
You need to be logged in to leave comments. Login now