##// END OF EJS Templates
test-sparse-revlog: make the large bundle generation more robust and useful...
marmoute -
r53337:24c3b3db default
parent child Browse files
Show More
@@ -1,281 +1,318
1 1 #!/usr/bin/env python3
2 2 #
3 3 # generate-branchy-bundle - generate a branch for a "large" branchy repository
4 4 #
5 5 # Copyright 2018 Octobus, contact@octobus.net
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9 #
10 10 # This script generates a repository suitable for testing delta computation
11 11 # strategies.
12 12 #
13 13 # The repository update a single "large" file with many updates. One fixed part
14 14 # of the files always get updated while the rest of the lines get updated over
15 15 # time. This update happens over many topological branches, some getting merged
16 16 # back.
17 #
18 # --lazy will skip generating the file if one exist with the right content
19 # already.
20 # --validate make sure the generated bundle has the expected content.
17 21
18 22
19 23 import hashlib
20 24 import os
21 25 import shutil
22 26 import subprocess
23 27 import sys
24 28 import tempfile
25 29
26 30 import mercurial.context
27 31 import mercurial.hg
28 32 import mercurial.ui
29 33
# name of the generated bundle, stored in the test-artifact cache directory
BUNDLE_NAME = 'big-file-churn.hg'

# constants driving the shape of the generated repository
NB_CHANGESET = 5000  # total number of changesets to generate
PERIOD_MERGING = 8  # create a merge every 8 changesets
PERIOD_BRANCHING = 7  # start a new topological branch every 7 changesets
MOVE_BACK_MIN = 3  # minimum distance a new branch forks from the tip
MOVE_BACK_RANGE = 5  # extra, index-dependent fork distance

# constants driving the content of the large file we keep updating
#
# At each revision the beginning of the file changes, and a set of other
# lines changes too.
FILENAME = 'SPARSE-REVLOG-TEST-FILE'
NB_LINES = 10500  # total number of lines in the generated file
ALWAYS_CHANGE_LINES = 500  # leading lines rewritten at every revision
OTHER_CHANGES = 300  # period at which the remaining lines get touched
47 51
48 52
def build_graph(
    nb_changeset=None,
    period_branching=None,
    period_merging=None,
    move_back_min=None,
    move_back_range=None,
):
    """Build the changeset graph as a {rev: (p1, p2)} mapping.

    Revision 0 is the root (parents ``(None, None)``). Every
    `period_branching` revisions a new branch is started a few revisions
    back from the tip; every `period_merging` revisions the oldest head is
    merged back in.

    All parameters default to the module-level constants, so existing
    callers keep the exact same behavior; passing explicit values allows
    building smaller graphs (e.g. for testing).
    """
    # fall back to the module constants for backward compatibility
    if nb_changeset is None:
        nb_changeset = NB_CHANGESET
    if period_branching is None:
        period_branching = PERIOD_BRANCHING
    if period_merging is None:
        period_merging = PERIOD_MERGING
    if move_back_min is None:
        move_back_min = MOVE_BACK_MIN
    if move_back_range is None:
        move_back_range = MOVE_BACK_RANGE

    heads = {0}
    graph = {0: (None, None)}
    for idx in range(1, nb_changeset + 1):
        p, _ = parents = [idx - 1, None]
        if (idx % period_branching) == 0:
            # fork the new branch a variable number of revisions back
            back = move_back_min + (idx % move_back_range)
            for _ in range(back):
                p = graph.get(p, (p,))[0]
            parents[0] = p
        if (idx % period_merging) == 0:
            # merge the oldest head back into the graph
            parents[1] = min(heads)
        for p in parents:
            heads.discard(p)
        heads.add(idx)
        graph[idx] = tuple(parents)
    return graph
66 70
67 71
68 72 GRAPH = build_graph()
69 73
70 74
def nextcontent(previous_content):
    """Derive a new line content from the previous one.

    The new content is the ascii hex md5 digest of the previous content,
    which gives a deterministic but non-repeating stream of lines.
    """
    digest = hashlib.md5(previous_content).hexdigest()
    return digest.encode('ascii')
74 78
75 79
def filecontent(iteridx, oldcontent):
    """Yield the lines of a new file content.

    The content is derived from the iteration index `iteridx` and the
    previous content `oldcontent` (a sequence of lines, or None on the
    initial call).
    """
    # seed the rolling hash chain: empty on the very first revision,
    # the (unique) iteration index otherwise
    current = b'' if iteridx == 0 else b"%d" % iteridx

    for lineno in range(NB_LINES):
        if oldcontent is None or lineno <= ALWAYS_CHANGE_LINES:
            # no previous content, or within the always-rewritten head
            change = True
        else:
            # periodically touch the remaining lines, shifted by iteridx
            change = ((lineno - iteridx) % OTHER_CHANGES) == 0

        if change:
            yield current + b'\n'
            current = nextcontent(current)
        else:
            yield oldcontent[lineno]
99 103
100 104
def merge_content(base, left, right):
    """Merge two file contents, yielding the lines of the result.

    Lines updated on exactly one side are taken from that side; a brand new
    line is produced whenever both sides diverged from the base. Similar to
    what a manifest merge would do.
    """
    for base_line, left_line, right_line in zip(base, left, right):
        left_changed = base_line != left_line
        right_changed = base_line != right_line
        if not left_changed and not right_changed:
            yield base_line
        elif right_changed and not left_changed:
            yield right_line
        elif left_changed and not right_changed:
            yield left_line
        else:
            # both sides changed: synthesize a new line from both updates
            yield nextcontent(left_line + right_line)
116 120
117 121
def ancestors(graph, rev):
    """Return the set of ancestors of revision <rev> (including <rev>)."""
    seen = {rev}
    pending = [rev]
    while pending:
        current = pending.pop()
        for parent in graph[current]:
            # None marks a missing parent (the root has two of them)
            if parent is not None and parent not in seen:
                seen.add(parent)
                pending.append(parent)
    return seen
132 136
133 137
def gca(graph, left, right):
    """Find the greatest common ancestor of `left` and `right`.

    Note that the algorithm is naive and O(N^2) when run on every merge,
    however this is not much of an issue at the current scale.
    """
    common = ancestors(graph, left) & ancestors(graph, right)
    return max(common)
141 145
142 146
def make_one_content_fn(idx, base, left, right):
    """Build a function that computes a revision's content on demand.

    The dependencies (`base`, `left`, `right` content functions) are kept
    alive as closure references so they are not garbage-collected before we
    use them. Once the content has been computed, those references are
    dropped so the dependencies can be collected in turn.
    """

    def compute(idx=idx, base=base, left=left, right=right):
        # initial revision: no parent content to derive from
        if left is None:
            return list(filecontent(idx, None))
        # linear revision: derive from the single parent's content
        if base is None:
            return list(filecontent(idx, left()))
        # merge revision: merge both parents, then derive the new content
        merged = merge_content(base(), left(), right())
        return list(filecontent(idx, list(merged)))

    # drop the frame-level references: only `compute` keeps the parents alive
    del idx
    del base
    del left
    del right

    cache = None
    # single-element holder so the first call can drop `compute` (and with
    # it the parent contents) once the result has been cached
    holder = [compute]
    del compute

    def final_fn():
        nonlocal cache
        if cache is None:
            fn = holder.pop()
            cache = list(fn())
            del fn
        return cache

    return final_fn
179 183
180 184
def build_content_graph(graph):
    """Produce the file content for every revision.

    The content is generated on demand and cached. Remove entries from the
    returned dictionary as you consume them to reduce memory usage.
    """
    content = {}
    for rev, (p1, p2) in graph.items():
        base = left = right = None
        if p1 is not None:
            left = content[p1]
        if p2 is not None:
            right = content[p2]
            # merges also need the common ancestor's content as merge base
            base = content[gca(graph, p1, p2)]
        content[rev] = make_one_content_fn(rev, base, left, right)
    return content
198 202
199 203
200 204 CONTENT = build_content_graph(GRAPH)
201 205
202 206
def hg(command, *args):
    """Call a mercurial command with appropriate config and arguments."""
    env = os.environ.copy()
    # prefer chg when the environment advertises it, for speed
    cmd = ['chg'] if 'CHGHG' in env else ['hg']
    cmd.append('--quiet')
    cmd.append(command)
    if command == 'commit':
        # reproducible commit metadata
        cmd.extend(['--date', '0 0', '--user', 'test'])
    elif command == 'merge':
        # avoid conflicts by picking the local variant
        cmd.extend(['--tool', ':merge-local'])
    cmd.extend(args)
    # make sure no user configuration leaks into the generated repository
    env['HGRCPATH'] = ''
    return subprocess.check_call(cmd, env=env)
221 225
222 226
def write_repo(path):
    """Write the repository content, committing each revision in memory."""
    repo = mercurial.hg.repository(
        mercurial.ui.ui.load(),
        path=path.encode('utf-8'),
    )
    # map our integer revision ids to the node ids mercurial created
    nodemap = {None: repo.nodeconstants.nullid}
    with repo.lock(), repo.transaction(b'bundle-generation'):
        for idx, (p1, p2) in GRAPH.items():
            if sys.stdout.isatty():
                print("generating commit #%d/%d" % (idx, NB_CHANGESET))

            # pop the content out of the cache so it can be garbage
            # collected once its children have been committed
            def file_fn(repo, memctx, path, idx=idx):
                return mercurial.context.memfilectx(
                    repo,
                    memctx,
                    path,
                    data=b''.join(CONTENT.pop(idx)()),
                )

            mc = mercurial.context.memctx(
                repo,
                (nodemap[p1], nodemap[p2]),
                b'commit #%d' % idx if idx else b'initial commit',
                [FILENAME.encode('ascii')],
                file_fn,
                user=b"test",
                date=(0, 0),
            )
            nodemap[idx] = repo.commitctx(mc)
252 256
253 257
254 def run(target):
def compute_md5(target):
    """Return the hex md5 digest of the file at `target`.

    The file is hashed incrementally so that the (potentially very large)
    bundle is never fully loaded in memory.
    """
    digest = hashlib.md5()
    with open(target, 'rb') as bundle:
        # 1 MiB chunks: large enough to be fast, small enough to stay cheap
        for chunk in iter(lambda: bundle.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest()
262
263
def write_md5(target, md5):
    """Record `md5` in the checksum file sitting next to `target`."""
    checksum_path = target + '.md5'
    with open(checksum_path, 'wb') as md5file:
        md5file.write(md5.encode('ascii') + b'\n')
267
268
def read_md5(target):
    """Return the checksum previously recorded for `target`."""
    checksum_path = target + '.md5'
    with open(checksum_path, 'rb') as md5file:
        recorded = md5file.read()
    return recorded.strip().decode('ascii')
272
273
def up_to_date_target(target):
    """Return True if `target` already exists with the expected content.

    The expected content is defined by the md5 recorded in the associated
    `.md5` file; a missing bundle or missing checksum file both count as
    "not up to date".
    """
    try:
        found = compute_md5(target)
        expected = read_md5(target)
    except OSError:
        # bundle or checksum file missing/unreadable: needs (re)generation
        return False
    return found == expected
282
283
def run(target, validate=False):
    """Generate the churning bundle and write it at `target`.

    When `validate` is False, the md5 of the generated bundle is recorded in
    `target + '.md5'`. When `validate` is True, the md5 is instead compared
    to the previously recorded one, and a non-zero status is returned on
    mismatch.

    Returns 0 on success, 1 on validation failure.
    """
    tmpdir = tempfile.mkdtemp(prefix='tmp-hg-test-big-file-bundle-')
    # remember the current directory: the original code removed `tmpdir`
    # while it was still the process's cwd, leaving the process inside a
    # deleted directory afterwards
    previous_dir = os.getcwd()
    try:
        os.chdir(tmpdir)
        hg(
            'init',
            '--config',
            'format.maxchainlen=%d' % NB_CHANGESET,
        )
        write_repo(tmpdir)
        hg('bundle', '--all', target, '--config', 'devel.bundle.delta=p1')
        digest = compute_md5(target)
        if not validate:
            write_md5(target, digest)
        else:
            expected = read_md5(target)
            if expected != digest:
                msg = "bundle generated does not match the expected content\n"
                msg += " expected: %s\n" % expected
                msg += " got: %s" % digest
                print(msg, file=sys.stderr)
                return 1
    finally:
        # restore the cwd before deleting the temporary directory
        os.chdir(previous_dir)
        shutil.rmtree(tmpdir)
    return 0
276 309
277 310
if __name__ == '__main__':
    script_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
    target = os.path.join(script_dir, os.pardir, 'cache', BUNDLE_NAME)
    # --lazy: skip the (slow) generation when the bundle already matches
    # its recorded md5
    lazy = '--lazy' in sys.argv[1:]
    # --validate: check the generated bundle against the recorded md5
    # instead of overwriting the record
    validate = '--validate' in sys.argv[1:]
    if lazy and up_to_date_target(target):
        sys.exit(0)
    sys.exit(run(target, validate=validate))
@@ -1,520 +1,524
1 1 ====================================
2 2 Test delta choice with sparse revlog
3 3 ====================================
4 4
5 5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
6 6 to generate an appropriate file, so we test with a single file instead. The
7 7 goal is to observe intermediate snapshot being created.
8 8
9 9 We need a large enough file. Part of the content needs to be replaced
10 10 repeatedly while some of it changes rarely.
11 11
12 12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
13 13
14 #if pure
14 15 $ expectedhash=`cat "$bundlepath".md5`
15
16 #if slow
17
18 $ if [ ! -f "$bundlepath" ]; then
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
20 > fi
21
22 #else
23
24 16 $ if [ ! -f "$bundlepath" ]; then
25 17 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
26 18 > exit 80
27 19 > fi
28
29 #endif
30
31 20 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
32 21 $ if [ "$currenthash" != "$expectedhash" ]; then
33 22 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
34 23 > exit 80
35 24 > fi
25 #else
26
27 #if slow
28 $ LAZY_GEN=""
29
30 #else
31 $ LAZY_GEN="--lazy"
32 #endif
33
34 #endif
35
36 If the validation fails, either something is broken or the expected md5 needs updating.
37 To update the md5, invoke the script without --validate
38
39 $ "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py --validate $LAZY_GEN > /dev/null
36 40
37 41 $ cat >> $HGRCPATH << EOF
38 42 > [format]
39 43 > sparse-revlog = yes
40 44 > maxchainlen = 15
41 45 > revlog-compression=zlib
42 46 > [storage]
43 47 > revlog.optimize-delta-parent-choice = yes
44 48 > revlog.reuse-external-delta-parent = no
45 49 > revlog.reuse-external-delta = no
46 50 > EOF
47 51 $ hg init sparse-repo
48 52 $ cd sparse-repo
49 53 $ hg unbundle $bundlepath
50 54 adding changesets
51 55 adding manifests
52 56 adding file changes
53 57 added 5001 changesets with 5001 changes to 1 files (+89 heads)
54 58 new changesets 9706f5af64f4:e4eee5e41c37 (5001 drafts)
55 59 (run 'hg heads' to see heads, 'hg merge' to merge)
56 60 $ hg up
57 61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 62 updated to "e4eee5e41c37: commit #5000"
59 63 89 other heads for branch "default"
60 64
61 65 Sanity check the graph shape
62 66
63 67 $ hg log -T '{rev} {p1rev} {p2rev}\n' --rev '0:100'
64 68 0 -1 -1
65 69 1 0 -1
66 70 2 1 -1
67 71 3 2 -1
68 72 4 3 -1
69 73 5 4 -1
70 74 6 5 -1
71 75 7 1 -1
72 76 8 7 6
73 77 9 8 -1
74 78 10 9 -1
75 79 11 10 -1
76 80 12 11 -1
77 81 13 12 -1
78 82 14 1 -1
79 83 15 14 -1
80 84 16 15 13
81 85 17 16 -1
82 86 18 17 -1
83 87 19 18 -1
84 88 20 19 -1
85 89 21 16 -1
86 90 22 21 -1
87 91 23 22 -1
88 92 24 23 20
89 93 25 24 -1
90 94 26 25 -1
91 95 27 26 -1
92 96 28 21 -1
93 97 29 28 -1
94 98 30 29 -1
95 99 31 30 -1
96 100 32 31 27
97 101 33 32 -1
98 102 34 33 -1
99 103 35 31 -1
100 104 36 35 -1
101 105 37 36 -1
102 106 38 37 -1
103 107 39 38 -1
104 108 40 39 34
105 109 41 40 -1
106 110 42 36 -1
107 111 43 42 -1
108 112 44 43 -1
109 113 45 44 -1
110 114 46 45 -1
111 115 47 46 -1
112 116 48 47 41
113 117 49 36 -1
114 118 50 49 -1
115 119 51 50 -1
116 120 52 51 -1
117 121 53 52 -1
118 122 54 53 -1
119 123 55 54 -1
120 124 56 51 48
121 125 57 56 -1
122 126 58 57 -1
123 127 59 58 -1
124 128 60 59 -1
125 129 61 60 -1
126 130 62 61 -1
127 131 63 56 -1
128 132 64 63 55
129 133 65 64 -1
130 134 66 65 -1
131 135 67 66 -1
132 136 68 67 -1
133 137 69 68 -1
134 138 70 66 -1
135 139 71 70 -1
136 140 72 71 62
137 141 73 72 -1
138 142 74 73 -1
139 143 75 74 -1
140 144 76 75 -1
141 145 77 71 -1
142 146 78 77 -1
143 147 79 78 -1
144 148 80 79 69
145 149 81 80 -1
146 150 82 81 -1
147 151 83 82 -1
148 152 84 71 -1
149 153 85 84 -1
150 154 86 85 -1
151 155 87 86 -1
152 156 88 87 76
153 157 89 88 -1
154 158 90 89 -1
155 159 91 86 -1
156 160 92 91 -1
157 161 93 92 -1
158 162 94 93 -1
159 163 95 94 -1
160 164 96 95 83
161 165 97 96 -1
162 166 98 91 -1
163 167 99 98 -1
164 168 100 99 -1
165 169
166 170 sanity check the change pattern
167 171
168 172 $ hg log --stat -r 0:3
169 173 changeset: 0:9706f5af64f4
170 174 user: test
171 175 date: Thu Jan 01 00:00:00 1970 +0000
172 176 summary: initial commit
173 177
174 178 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
175 179 1 files changed, 10500 insertions(+), 0 deletions(-)
176 180
177 181 changeset: 1:724907deaa5e
178 182 user: test
179 183 date: Thu Jan 01 00:00:00 1970 +0000
180 184 summary: commit #1
181 185
182 186 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
183 187 1 files changed, 534 insertions(+), 534 deletions(-)
184 188
185 189 changeset: 2:62c41bce3e5d
186 190 user: test
187 191 date: Thu Jan 01 00:00:00 1970 +0000
188 192 summary: commit #2
189 193
190 194 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
191 195 1 files changed, 534 insertions(+), 534 deletions(-)
192 196
193 197 changeset: 3:348a9cbd6959
194 198 user: test
195 199 date: Thu Jan 01 00:00:00 1970 +0000
196 200 summary: commit #3
197 201
198 202 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
199 203 1 files changed, 534 insertions(+), 534 deletions(-)
200 204
201 205
202 206 $ f -s .hg/store/data/*.d
203 207 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=81370673
204 208 $ hg debugrevlog *
205 209 format : 1
206 210 flags : generaldelta
207 211
208 212 revisions : 5001
209 213 merges : 625 (12.50%)
210 214 normal : 4376 (87.50%)
211 215 revisions : 5001
212 216 empty : 0 ( 0.00%)
213 217 text : 0 (100.00%)
214 218 delta : 0 (100.00%)
215 219 snapshot : 360 ( 7.20%)
216 220 lvl-0 : 11 ( 0.22%)
217 221 lvl-1 : 50 ( 1.00%) non-ancestor-bases: 41 (82.00%)
218 222 lvl-2 : 128 ( 2.56%) non-ancestor-bases: 119 (92.97%)
219 223 lvl-3 : 122 ( 2.44%) non-ancestor-bases: 111 (90.98%)
220 224 lvl-4 : 49 ( 0.98%) non-ancestor-bases: 46 (93.88%)
221 225 deltas : 4641 (92.80%)
222 226 revision size : 81370673
223 227 snapshot : 16282100 (20.01%)
224 228 lvl-0 : 2188012 ( 2.69%)
225 229 lvl-1 : 4848143 ( 5.96%)
226 230 lvl-2 : 5366175 ( 6.59%)
227 231 lvl-3 : 3085157 ( 3.79%)
228 232 lvl-4 : 794613 ( 0.98%)
229 233 deltas : 65088573 (79.99%)
230 234
231 235 chunks : 5001
232 236 0x78 (x) : 5001 (100.00%)
233 237 chunks size : 81370673
234 238 0x78 (x) : 81370673 (100.00%)
235 239
236 240
237 241 total-stored-content: 1 717 863 086 bytes
238 242
239 243 avg chain length : 8
240 244 max chain length : 15
241 245 max chain reach : 18326506
242 246 compression ratio : 21
243 247
244 248 uncompressed data size (min/max/avg) : 339930 / 346471 / 343503
245 249 full revision size (min/max/avg) : 196682 / 201129 / 198910
246 250 inter-snapshot size (min/max/avg) : 11620 / 172223 / 40384
247 251 level-1 (min/max/avg) : 14329 / 172223 / 96962
248 252 level-2 (min/max/avg) : 11664 / 86421 / 41923
249 253 level-3 (min/max/avg) : 11620 / 42674 / 25288
250 254 level-4 (min/max/avg) : 11631 / 21209 / 16216
251 255 delta size (min/max/avg) : 10610 / 190651 / 14024
252 256
253 257 deltas against prev : 3916 (84.38%)
254 258 where prev = p1 : 3916 (100.00%)
255 259 where prev = p2 : 0 ( 0.00%)
256 260 other-ancestor : 0 ( 0.00%)
257 261 unrelated : 0 ( 0.00%)
258 262 deltas against p1 : 667 (14.37%)
259 263 deltas against p2 : 58 ( 1.25%)
260 264 deltas against ancs : 0 ( 0.00%)
261 265 deltas against other : 0 ( 0.00%)
262 266
263 267
264 268 Test `debug-delta-find`
265 269 -----------------------
266 270
267 271 $ ls -1
268 272 SPARSE-REVLOG-TEST-FILE
269 273 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
270 274 4996 4995 -1 11 3 4947 snap
271 275 $ LAST_SNAP=`hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1| sed 's/^ \+//'| cut -d ' ' -f 1`
272 276 $ echo Last Snapshot: $LAST_SNAP
273 277 Last Snapshot: 4996
274 278 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP
275 279 DBG-DELTAS-SEARCH: SEARCH rev=4996
276 280 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
277 281 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
278 282 DBG-DELTAS-SEARCH: type=snapshot-3
279 283 DBG-DELTAS-SEARCH: size=15153
280 284 DBG-DELTAS-SEARCH: base=4958
281 285 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
282 286 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
283 287 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
284 288 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
285 289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
286 290 DBG-DELTAS-SEARCH: type=snapshot-2
287 291 DBG-DELTAS-SEARCH: size=30977
288 292 DBG-DELTAS-SEARCH: base=4947
289 293 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
290 294 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
291 295 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
292 296 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
293 297 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
294 298 DBG-DELTAS-SEARCH: type=snapshot-1
295 299 DBG-DELTAS-SEARCH: size=164878
296 300 DBG-DELTAS-SEARCH: base=4667
297 301 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
298 302 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
299 303 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
300 304 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
301 305 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
302 306 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
303 307 DBG-DELTAS-SEARCH: type=snapshot-0
304 308 DBG-DELTAS-SEARCH: size=196699
305 309 DBG-DELTAS-SEARCH: base=-1
306 310 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
307 311 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
308 312 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
309 313 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
310 314 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
311 315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
312 316 DBG-DELTAS-SEARCH: type=snapshot-2
313 317 DBG-DELTAS-SEARCH: size=58198
314 318 DBG-DELTAS-SEARCH: base=4947
315 319 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
316 320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
317 321 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
318 322 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
319 323
320 324 $ cat << EOF >>.hg/hgrc
321 325 > [storage]
322 326 > revlog.optimize-delta-parent-choice = no
323 327 > revlog.reuse-external-delta = yes
324 328 > EOF
325 329
326 330 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --quiet
327 331 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
328 332 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source full
329 333 DBG-DELTAS-SEARCH: SEARCH rev=4996
330 334 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
331 335 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
332 336 DBG-DELTAS-SEARCH: type=snapshot-3
333 337 DBG-DELTAS-SEARCH: size=15153
334 338 DBG-DELTAS-SEARCH: base=4958
335 339 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
336 340 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
337 341 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
338 342 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
339 343 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
340 344 DBG-DELTAS-SEARCH: type=snapshot-2
341 345 DBG-DELTAS-SEARCH: size=30977
342 346 DBG-DELTAS-SEARCH: base=4947
343 347 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
344 348 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
345 349 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
346 350 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
347 351 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
348 352 DBG-DELTAS-SEARCH: type=snapshot-1
349 353 DBG-DELTAS-SEARCH: size=164878
350 354 DBG-DELTAS-SEARCH: base=4667
351 355 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
352 356 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
353 357 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
354 358 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
355 359 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
356 360 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
357 361 DBG-DELTAS-SEARCH: type=snapshot-0
358 362 DBG-DELTAS-SEARCH: size=196699
359 363 DBG-DELTAS-SEARCH: base=-1
360 364 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
361 365 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
362 366 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
363 367 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
364 368 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
365 369 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
366 370 DBG-DELTAS-SEARCH: type=snapshot-2
367 371 DBG-DELTAS-SEARCH: size=58198
368 372 DBG-DELTAS-SEARCH: base=4947
369 373 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
370 374 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
371 375 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
372 376 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
373 377 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source storage
374 378 DBG-DELTAS-SEARCH: SEARCH rev=4996
375 379 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
376 380 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
377 381 DBG-DELTAS-SEARCH: type=snapshot-1
378 382 DBG-DELTAS-SEARCH: size=164878
379 383 DBG-DELTAS-SEARCH: base=4667
380 384 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
381 385 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
382 386 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
383 387 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=-1 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
384 388 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p1
385 389 DBG-DELTAS-SEARCH: SEARCH rev=4996
386 390 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
387 391 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
388 392 DBG-DELTAS-SEARCH: type=snapshot-3
389 393 DBG-DELTAS-SEARCH: size=15153
390 394 DBG-DELTAS-SEARCH: base=4958
391 395 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
392 396 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
393 397 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
394 398 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
395 399 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
396 400 DBG-DELTAS-SEARCH: type=snapshot-2
397 401 DBG-DELTAS-SEARCH: size=30977
398 402 DBG-DELTAS-SEARCH: base=4947
399 403 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
400 404 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
401 405 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
402 406 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
403 407 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
404 408 DBG-DELTAS-SEARCH: type=snapshot-1
405 409 DBG-DELTAS-SEARCH: size=164878
406 410 DBG-DELTAS-SEARCH: base=4667
407 411 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
408 412 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
409 413 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
410 414 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
411 415 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
412 416 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
413 417 DBG-DELTAS-SEARCH: type=snapshot-0
414 418 DBG-DELTAS-SEARCH: size=196699
415 419 DBG-DELTAS-SEARCH: base=-1
416 420 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
417 421 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
418 422 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
419 423 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
420 424 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
421 425 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
422 426 DBG-DELTAS-SEARCH: type=snapshot-2
423 427 DBG-DELTAS-SEARCH: size=58198
424 428 DBG-DELTAS-SEARCH: base=4947
425 429 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
426 430 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
427 431 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
428 432 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
429 433 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p2
430 434 DBG-DELTAS-SEARCH: SEARCH rev=4996
431 435 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
432 436 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
433 437 DBG-DELTAS-SEARCH: type=snapshot-3
434 438 DBG-DELTAS-SEARCH: size=15153
435 439 DBG-DELTAS-SEARCH: base=4958
436 440 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
437 441 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
438 442 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
439 443 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
440 444 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
441 445 DBG-DELTAS-SEARCH: type=snapshot-2
442 446 DBG-DELTAS-SEARCH: size=30977
443 447 DBG-DELTAS-SEARCH: base=4947
444 448 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
445 449 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
446 450 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
447 451 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
448 452 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
449 453 DBG-DELTAS-SEARCH: type=snapshot-1
450 454 DBG-DELTAS-SEARCH: size=164878
451 455 DBG-DELTAS-SEARCH: base=4667
452 456 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
453 457 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
454 458 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
455 459 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
456 460 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
457 461 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
458 462 DBG-DELTAS-SEARCH: type=snapshot-0
459 463 DBG-DELTAS-SEARCH: size=196699
460 464 DBG-DELTAS-SEARCH: base=-1
461 465 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
462 466 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
463 467 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
464 468 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
465 469 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
466 470 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
467 471 DBG-DELTAS-SEARCH: type=snapshot-2
468 472 DBG-DELTAS-SEARCH: size=58198
469 473 DBG-DELTAS-SEARCH: base=4947
470 474 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
471 475 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
472 476 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
473 477 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
474 478 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source prev
475 479 DBG-DELTAS-SEARCH: SEARCH rev=4996
476 480 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
477 481 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
478 482 DBG-DELTAS-SEARCH: type=snapshot-3
479 483 DBG-DELTAS-SEARCH: size=15153
480 484 DBG-DELTAS-SEARCH: base=4958
481 485 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
482 486 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
483 487 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
484 488 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
485 489 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
486 490 DBG-DELTAS-SEARCH: type=snapshot-2
487 491 DBG-DELTAS-SEARCH: size=30977
488 492 DBG-DELTAS-SEARCH: base=4947
489 493 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
490 494 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
491 495 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
492 496 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
493 497 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
494 498 DBG-DELTAS-SEARCH: type=snapshot-1
495 499 DBG-DELTAS-SEARCH: size=164878
496 500 DBG-DELTAS-SEARCH: base=4667
497 501 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
498 502 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
499 503 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
500 504 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
501 505 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
502 506 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
503 507 DBG-DELTAS-SEARCH: type=snapshot-0
504 508 DBG-DELTAS-SEARCH: size=196699
505 509 DBG-DELTAS-SEARCH: base=-1
506 510 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
507 511 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
508 512 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
509 513 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
510 514 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
511 515 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
512 516 DBG-DELTAS-SEARCH: type=snapshot-2
513 517 DBG-DELTAS-SEARCH: size=58198
514 518 DBG-DELTAS-SEARCH: base=4947
515 519 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
516 520 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
517 521 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
518 522 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
519 523
520 524 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now