##// END OF EJS Templates
test-sparse-revlog: make the large bundle generation more robust and useful...
marmoute -
r53337:24c3b3db default
parent child Browse files
Show More
@@ -1,281 +1,318
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # generate-branchy-bundle - generate a branch for a "large" branchy repository
3 # generate-branchy-bundle - generate a branch for a "large" branchy repository
4 #
4 #
5 # Copyright 2018 Octobus, contact@octobus.net
5 # Copyright 2018 Octobus, contact@octobus.net
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9 #
9 #
10 # This script generates a repository suitable for testing delta computation
10 # This script generates a repository suitable for testing delta computation
11 # strategies.
11 # strategies.
12 #
12 #
13 # The repository updates a single "large" file with many updates. One fixed part
13 # The repository updates a single "large" file with many updates. One fixed part
14 # of the file always gets updated while the rest of the lines get updated over
14 # of the file always gets updated while the rest of the lines get updated over
15 # time. This update happens over many topological branches, some getting merged
15 # time. This update happens over many topological branches, some getting merged
16 # back.
16 # back.
17 #
18 # --lazy will skip generating the file if one exists with the right content
19 # already.
20 # --validate makes sure the generated bundle has the expected content.
17
21
18
22
19 import hashlib
23 import hashlib
20 import os
24 import os
21 import shutil
25 import shutil
22 import subprocess
26 import subprocess
23 import sys
27 import sys
24 import tempfile
28 import tempfile
25
29
26 import mercurial.context
30 import mercurial.context
27 import mercurial.hg
31 import mercurial.hg
28 import mercurial.ui
32 import mercurial.ui
29
33
30 BUNDLE_NAME = 'big-file-churn.hg'
34 BUNDLE_NAME = 'big-file-churn.hg'
31
35
32 # constants for generating the repository
36 # constants for generating the repository
33 NB_CHANGESET = 5000
37 NB_CHANGESET = 5000
34 PERIOD_MERGING = 8
38 PERIOD_MERGING = 8
35 PERIOD_BRANCHING = 7
39 PERIOD_BRANCHING = 7
36 MOVE_BACK_MIN = 3
40 MOVE_BACK_MIN = 3
37 MOVE_BACK_RANGE = 5
41 MOVE_BACK_RANGE = 5
38
42
39 # constants for generating the large file we keep updating
43 # constants for generating the large file we keep updating
40 #
44 #
41 # At each revision, the beginning on the file change,
45 # At each revision, the beginning on the file change,
42 # and set of other lines changes too.
46 # and set of other lines changes too.
43 FILENAME = 'SPARSE-REVLOG-TEST-FILE'
47 FILENAME = 'SPARSE-REVLOG-TEST-FILE'
44 NB_LINES = 10500
48 NB_LINES = 10500
45 ALWAYS_CHANGE_LINES = 500
49 ALWAYS_CHANGE_LINES = 500
46 OTHER_CHANGES = 300
50 OTHER_CHANGES = 300
47
51
48
52
def build_graph(
    nb_changeset=None,
    period_branching=None,
    period_merging=None,
    move_back_min=None,
    move_back_range=None,
):
    """Build the shape of the test repository's DAG.

    Returns a dict mapping each revision index to its ``(p1, p2)`` parent
    pair (``None`` for a missing parent).  Revision 0 is the root; every
    ``period_branching``-th revision branches off an older ancestor, and
    every ``period_merging``-th revision merges in the oldest head.

    All parameters default to the module-level constants, so calling
    ``build_graph()`` behaves exactly as before; passing explicit values
    allows generating differently shaped/sized graphs.
    """
    # resolve defaults at call time so the module constants stay the
    # single source of truth for the standard graph
    if nb_changeset is None:
        nb_changeset = NB_CHANGESET
    if period_branching is None:
        period_branching = PERIOD_BRANCHING
    if period_merging is None:
        period_merging = PERIOD_MERGING
    if move_back_min is None:
        move_back_min = MOVE_BACK_MIN
    if move_back_range is None:
        move_back_range = MOVE_BACK_RANGE

    heads = {0}
    graph = {0: (None, None)}
    for idx in range(1, nb_changeset + 1):
        p, _ = parents = [idx - 1, None]
        if (idx % period_branching) == 0:
            # start a topological branch: walk back a few first parents
            back = move_back_min + (idx % move_back_range)
            for _ in range(back):
                p = graph.get(p, (p,))[0]
            parents[0] = p
        if (idx % period_merging) == 0:
            # merge the oldest remaining head back in
            parents[1] = min(heads)
        for p in parents:
            heads.discard(p)
        heads.add(idx)
        graph[idx] = tuple(parents)
    return graph
66
70
67
71
68 GRAPH = build_graph()
72 GRAPH = build_graph()
69
73
70
74
def nextcontent(previous_content):
    """Derive the next file line from *previous_content*.

    The md5 hex digest gives a deterministic, fixed-length bytes value.
    """
    digest = hashlib.md5(previous_content)
    return digest.hexdigest().encode('ascii')
74
78
75
79
def filecontent(iteridx, oldcontent):
    """Yield the file's lines for iteration *iteridx*.

    *oldcontent* is the previous revision's list of lines (``None`` for
    the initial revision).  The first ``ALWAYS_CHANGE_LINES`` lines are
    rewritten at every iteration; beyond that, a line is rewritten only
    periodically (every ``OTHER_CHANGES`` lines, phase-shifted by the
    iteration index) and otherwise carried over unchanged.
    """
    # seed the rolling content with the iteration index (empty for rev 0)
    if iteridx == 0:
        rolling = b''
    else:
        rolling = b"%d" % iteridx

    for lineno in range(NB_LINES):
        keep_old = (
            oldcontent is not None
            and ALWAYS_CHANGE_LINES < lineno
            and (lineno - iteridx) % OTHER_CHANGES != 0
        )
        if keep_old:
            yield oldcontent[lineno]
        else:
            yield rolling + b'\n'
            rolling = nextcontent(rolling)
99
103
100
104
def merge_content(base, left, right):
    """Three-way merge of file contents, line by line.

    Takes the unambiguously changed side when possible; on a genuine
    conflict, synthesizes a new deterministic line (similar to what a
    manifest merge would do).
    """
    for base_line, left_line, right_line in zip(base, left, right):
        if base_line == left_line:
            # left unchanged (or both sides agree): right's line wins
            yield right_line
        elif base_line == right_line:
            # right unchanged: left's change wins
            yield left_line
        else:
            # both sides changed: build a fresh line from both
            yield nextcontent(left_line + right_line)
116
120
117
121
def ancestors(graph, rev):
    """Return the set of ancestors of revision *rev* (including *rev*)."""
    seen = {rev}
    pending = [rev]
    while pending:
        current = pending.pop()
        for parent in graph[current]:
            # skip missing parents and already-visited revisions
            if parent is None or parent in seen:
                continue
            seen.add(parent)
            pending.append(parent)
    return seen
132
136
133
137
def gca(graph, left, right):
    """Return the greatest common ancestor of *left* and *right*.

    Brute force: intersect the two full ancestor sets and pick the highest
    revision number.  Quadratic when applied to every merge, but acceptable
    at the current scale.
    """
    common = ancestors(graph, left)
    common &= ancestors(graph, right)
    return max(common)
141
145
142
146
def make_one_content_fn(idx, base, left, right):
    """Return a lazy, cached producer for the content of revision *idx*.

    ``base``, ``left`` and ``right`` are themselves producer functions (or
    ``None``).  They stay referenced only until the first computation: once
    the content has been built, every reference to the dependencies is
    dropped so intermediate contents can be garbage collected.
    """

    def compute(idx=idx, base=base, left=left, right=right):
        # root revision: no parent content to derive from
        if left is None:
            produced = filecontent(idx, None)
        # linear revision: evolve the single parent's content
        elif base is None:
            produced = filecontent(idx, left())
        # merge revision: three-way merge the parents, then evolve
        else:
            merged = merge_content(base(), left(), right())
            produced = filecontent(idx, list(merged))
        return list(produced)

    # drop our own references so `compute` holds the only ones
    del idx
    del base
    del left
    del right

    cache = []
    pending = [compute]
    del compute

    def final_fn():
        if not cache:
            # first call: run the computation, then let `compute` (and the
            # dependency producers it captured) be garbage collected
            producer = pending.pop()
            cache.append(list(producer()))
            del producer
        return cache[0]

    return final_fn
179
183
180
184
def build_content_graph(graph):
    """Produce a content generator for every revision of *graph*.

    Content is generated on demand and cached.  Pop entries from the
    returned dictionary as you consume them to reduce memory usage.
    """
    content = {}
    for rev, (p1, p2) in graph.items():
        base = left = right = None
        if p1 is not None:
            left = content[p1]
        if p2 is not None:
            right = content[p2]
            # merge: the merge base is the greatest common ancestor
            base = content[gca(graph, p1, p2)]
        content[rev] = make_one_content_fn(rev, base, left, right)
    return content
198
202
199
203
200 CONTENT = build_content_graph(GRAPH)
204 CONTENT = build_content_graph(GRAPH)
201
205
202
206
def hg(command, *args):
    """Run a mercurial command with reproducible config and arguments.

    Uses ``chg`` when the CHGHG environment variable is set; neutralizes
    HGRCPATH so user configuration cannot leak into the generated data.
    """
    env = os.environ.copy()
    launcher = 'chg' if 'CHGHG' in env else 'hg'
    cmd = [launcher, '--quiet', command]
    if command == 'commit':
        # reproducible commit metadata
        cmd.extend(['--date', '0 0', '--user', 'test'])
    elif command == 'merge':
        # avoid conflicts by picking the local variant
        cmd.extend(['--tool', ':merge-local'])
    cmd.extend(args)
    env['HGRCPATH'] = ''
    return subprocess.check_call(cmd, env=env)
221
225
222
226
def write_repo(path):
    """Create every commit of GRAPH directly in the repository at *path*.

    Uses the in-process mercurial API (memctx) rather than the command
    line, popping each revision's content out of CONTENT as it is consumed
    so the memory can be reclaimed.
    """
    repo = mercurial.hg.repository(
        mercurial.ui.ui.load(),
        path=path.encode('utf-8'),
    )
    # map our integer revision ids to mercurial nodes; None -> nullid
    node_of = {None: repo.nodeconstants.nullid}
    with repo.lock(), repo.transaction(b'bundle-generation'):
        for rev, (p1, p2) in GRAPH.items():
            if sys.stdout.isatty():
                print("generating commit #%d/%d" % (rev, NB_CHANGESET))

            # bind `rev` as a default so each iteration keeps its own value
            def file_fn(repo, memctx, path, rev=rev):
                # pop the content so it can be garbage collected afterwards
                return mercurial.context.memfilectx(
                    repo,
                    memctx,
                    path,
                    data=b''.join(CONTENT.pop(rev)()),
                )

            mc = mercurial.context.memctx(
                repo,
                (node_of[p1], node_of[p2]),
                b'commit #%d' % rev if rev else b'initial commit',
                [FILENAME.encode('ascii')],
                file_fn,
                user=b"test",
                date=(0, 0),
            )
            node_of[rev] = repo.commitctx(mc)
252
256
253
257
254 def run(target):
def compute_md5(target):
    """Return the hex md5 digest of the file at *target*."""
    with open(target, 'rb') as fp:
        return hashlib.md5(fp.read()).hexdigest()
262
263
def write_md5(target, md5):
    """Record *md5* (hex string) in the ``.md5`` companion file of *target*."""
    digest_line = md5.encode('ascii') + b'\n'
    with open(target + '.md5', 'wb') as fp:
        fp.write(digest_line)
267
268
def read_md5(target):
    """Return the md5 recorded in the ``.md5`` companion file of *target*."""
    with open(target + '.md5', 'rb') as fp:
        return fp.read().strip().decode('ascii')
272
273
def up_to_date_target(target):
    """Return True if *target* exists and matches its recorded md5.

    Missing bundle or missing ``.md5`` companion counts as out of date.
    """
    try:
        current = compute_md5(target)
        recorded = read_md5(target)
    except OSError:
        # either file is absent/unreadable: needs (re)generation
        return False
    return current == recorded
282
283
def run(target, validate=False):
    """Generate the churning bundle at *target* and handle its md5.

    With ``validate=False`` (default), record the freshly generated
    bundle's md5 next to it.  With ``validate=True``, compare the digest
    against the recorded one instead and return 1 on mismatch.
    The repository is built in a temporary directory that is always
    cleaned up.
    """
    workdir = tempfile.mkdtemp(prefix='tmp-hg-test-big-file-bundle-')
    try:
        os.chdir(workdir)
        hg(
            'init',
            '--config',
            'format.maxchainlen=%d' % NB_CHANGESET,
        )
        write_repo(workdir)
        hg('bundle', '--all', target, '--config', 'devel.bundle.delta=p1')
        digest = compute_md5(target)
        if not validate:
            write_md5(target, digest)
        else:
            expected = read_md5(target)
            if expected != digest:
                msg = "bundle generated does not match the expected content\n"
                msg += "    expected: %s\n" % expected
                msg += "    got:      %s" % digest
                print(msg, file=sys.stderr)
                return 1
    finally:
        shutil.rmtree(workdir)
    return 0
276
309
277
310
if __name__ == '__main__':
    script_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
    target = os.path.join(script_dir, os.pardir, 'cache', BUNDLE_NAME)
    options = sys.argv[1:]
    lazy = '--lazy' in options
    validate = '--validate' in options
    # --lazy: nothing to do when the existing bundle matches its recorded md5
    if lazy and up_to_date_target(target):
        sys.exit(0)
    sys.exit(run(target, validate=validate))
@@ -1,520 +1,524
1 ====================================
1 ====================================
2 Test delta choice with sparse revlog
2 Test delta choice with sparse revlog
3 ====================================
3 ====================================
4
4
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
6 to generate an appropriate file, so we test with a single file instead. The
6 to generate an appropriate file, so we test with a single file instead. The
7 goal is to observe intermediate snapshot being created.
7 goal is to observe intermediate snapshot being created.
8
8
9 We need a large enough file. Part of the content needs to be replaced
9 We need a large enough file. Part of the content needs to be replaced
10 repeatedly while some of it changes rarely.
10 repeatedly while some of it changes rarely.
11
11
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
13
13
14 #if pure
14 $ expectedhash=`cat "$bundlepath".md5`
15 $ expectedhash=`cat "$bundlepath".md5`
15
16 #if slow
17
18 $ if [ ! -f "$bundlepath" ]; then
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
20 > fi
21
22 #else
23
24 $ if [ ! -f "$bundlepath" ]; then
16 $ if [ ! -f "$bundlepath" ]; then
25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
17 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
26 > exit 80
18 > exit 80
27 > fi
19 > fi
28
29 #endif
30
31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
20 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
32 $ if [ "$currenthash" != "$expectedhash" ]; then
21 $ if [ "$currenthash" != "$expectedhash" ]; then
33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
22 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
34 > exit 80
23 > exit 80
35 > fi
24 > fi
25 #else
26
27 #if slow
28 $ LAZY_GEN=""
29
30 #else
31 $ LAZY_GEN="--lazy"
32 #endif
33
34 #endif
35
36 If the validation fails, either something is broken or the expected md5 needs updating.
37 To update the md5, invoke the script without --validate
38
39 $ "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py --validate $LAZY_GEN > /dev/null
36
40
37 $ cat >> $HGRCPATH << EOF
41 $ cat >> $HGRCPATH << EOF
38 > [format]
42 > [format]
39 > sparse-revlog = yes
43 > sparse-revlog = yes
40 > maxchainlen = 15
44 > maxchainlen = 15
41 > revlog-compression=zlib
45 > revlog-compression=zlib
42 > [storage]
46 > [storage]
43 > revlog.optimize-delta-parent-choice = yes
47 > revlog.optimize-delta-parent-choice = yes
44 > revlog.reuse-external-delta-parent = no
48 > revlog.reuse-external-delta-parent = no
45 > revlog.reuse-external-delta = no
49 > revlog.reuse-external-delta = no
46 > EOF
50 > EOF
47 $ hg init sparse-repo
51 $ hg init sparse-repo
48 $ cd sparse-repo
52 $ cd sparse-repo
49 $ hg unbundle $bundlepath
53 $ hg unbundle $bundlepath
50 adding changesets
54 adding changesets
51 adding manifests
55 adding manifests
52 adding file changes
56 adding file changes
53 added 5001 changesets with 5001 changes to 1 files (+89 heads)
57 added 5001 changesets with 5001 changes to 1 files (+89 heads)
54 new changesets 9706f5af64f4:e4eee5e41c37 (5001 drafts)
58 new changesets 9706f5af64f4:e4eee5e41c37 (5001 drafts)
55 (run 'hg heads' to see heads, 'hg merge' to merge)
59 (run 'hg heads' to see heads, 'hg merge' to merge)
56 $ hg up
60 $ hg up
57 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 updated to "e4eee5e41c37: commit #5000"
62 updated to "e4eee5e41c37: commit #5000"
59 89 other heads for branch "default"
63 89 other heads for branch "default"
60
64
61 Sanity check the graph shape
65 Sanity check the graph shape
62
66
63 $ hg log -T '{rev} {p1rev} {p2rev}\n' --rev '0:100'
67 $ hg log -T '{rev} {p1rev} {p2rev}\n' --rev '0:100'
64 0 -1 -1
68 0 -1 -1
65 1 0 -1
69 1 0 -1
66 2 1 -1
70 2 1 -1
67 3 2 -1
71 3 2 -1
68 4 3 -1
72 4 3 -1
69 5 4 -1
73 5 4 -1
70 6 5 -1
74 6 5 -1
71 7 1 -1
75 7 1 -1
72 8 7 6
76 8 7 6
73 9 8 -1
77 9 8 -1
74 10 9 -1
78 10 9 -1
75 11 10 -1
79 11 10 -1
76 12 11 -1
80 12 11 -1
77 13 12 -1
81 13 12 -1
78 14 1 -1
82 14 1 -1
79 15 14 -1
83 15 14 -1
80 16 15 13
84 16 15 13
81 17 16 -1
85 17 16 -1
82 18 17 -1
86 18 17 -1
83 19 18 -1
87 19 18 -1
84 20 19 -1
88 20 19 -1
85 21 16 -1
89 21 16 -1
86 22 21 -1
90 22 21 -1
87 23 22 -1
91 23 22 -1
88 24 23 20
92 24 23 20
89 25 24 -1
93 25 24 -1
90 26 25 -1
94 26 25 -1
91 27 26 -1
95 27 26 -1
92 28 21 -1
96 28 21 -1
93 29 28 -1
97 29 28 -1
94 30 29 -1
98 30 29 -1
95 31 30 -1
99 31 30 -1
96 32 31 27
100 32 31 27
97 33 32 -1
101 33 32 -1
98 34 33 -1
102 34 33 -1
99 35 31 -1
103 35 31 -1
100 36 35 -1
104 36 35 -1
101 37 36 -1
105 37 36 -1
102 38 37 -1
106 38 37 -1
103 39 38 -1
107 39 38 -1
104 40 39 34
108 40 39 34
105 41 40 -1
109 41 40 -1
106 42 36 -1
110 42 36 -1
107 43 42 -1
111 43 42 -1
108 44 43 -1
112 44 43 -1
109 45 44 -1
113 45 44 -1
110 46 45 -1
114 46 45 -1
111 47 46 -1
115 47 46 -1
112 48 47 41
116 48 47 41
113 49 36 -1
117 49 36 -1
114 50 49 -1
118 50 49 -1
115 51 50 -1
119 51 50 -1
116 52 51 -1
120 52 51 -1
117 53 52 -1
121 53 52 -1
118 54 53 -1
122 54 53 -1
119 55 54 -1
123 55 54 -1
120 56 51 48
124 56 51 48
121 57 56 -1
125 57 56 -1
122 58 57 -1
126 58 57 -1
123 59 58 -1
127 59 58 -1
124 60 59 -1
128 60 59 -1
125 61 60 -1
129 61 60 -1
126 62 61 -1
130 62 61 -1
127 63 56 -1
131 63 56 -1
128 64 63 55
132 64 63 55
129 65 64 -1
133 65 64 -1
130 66 65 -1
134 66 65 -1
131 67 66 -1
135 67 66 -1
132 68 67 -1
136 68 67 -1
133 69 68 -1
137 69 68 -1
134 70 66 -1
138 70 66 -1
135 71 70 -1
139 71 70 -1
136 72 71 62
140 72 71 62
137 73 72 -1
141 73 72 -1
138 74 73 -1
142 74 73 -1
139 75 74 -1
143 75 74 -1
140 76 75 -1
144 76 75 -1
141 77 71 -1
145 77 71 -1
142 78 77 -1
146 78 77 -1
143 79 78 -1
147 79 78 -1
144 80 79 69
148 80 79 69
145 81 80 -1
149 81 80 -1
146 82 81 -1
150 82 81 -1
147 83 82 -1
151 83 82 -1
148 84 71 -1
152 84 71 -1
149 85 84 -1
153 85 84 -1
150 86 85 -1
154 86 85 -1
151 87 86 -1
155 87 86 -1
152 88 87 76
156 88 87 76
153 89 88 -1
157 89 88 -1
154 90 89 -1
158 90 89 -1
155 91 86 -1
159 91 86 -1
156 92 91 -1
160 92 91 -1
157 93 92 -1
161 93 92 -1
158 94 93 -1
162 94 93 -1
159 95 94 -1
163 95 94 -1
160 96 95 83
164 96 95 83
161 97 96 -1
165 97 96 -1
162 98 91 -1
166 98 91 -1
163 99 98 -1
167 99 98 -1
164 100 99 -1
168 100 99 -1
165
169
166 sanity check the change pattern
170 sanity check the change pattern
167
171
168 $ hg log --stat -r 0:3
172 $ hg log --stat -r 0:3
169 changeset: 0:9706f5af64f4
173 changeset: 0:9706f5af64f4
170 user: test
174 user: test
171 date: Thu Jan 01 00:00:00 1970 +0000
175 date: Thu Jan 01 00:00:00 1970 +0000
172 summary: initial commit
176 summary: initial commit
173
177
174 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
178 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
175 1 files changed, 10500 insertions(+), 0 deletions(-)
179 1 files changed, 10500 insertions(+), 0 deletions(-)
176
180
177 changeset: 1:724907deaa5e
181 changeset: 1:724907deaa5e
178 user: test
182 user: test
179 date: Thu Jan 01 00:00:00 1970 +0000
183 date: Thu Jan 01 00:00:00 1970 +0000
180 summary: commit #1
184 summary: commit #1
181
185
182 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
186 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
183 1 files changed, 534 insertions(+), 534 deletions(-)
187 1 files changed, 534 insertions(+), 534 deletions(-)
184
188
185 changeset: 2:62c41bce3e5d
189 changeset: 2:62c41bce3e5d
186 user: test
190 user: test
187 date: Thu Jan 01 00:00:00 1970 +0000
191 date: Thu Jan 01 00:00:00 1970 +0000
188 summary: commit #2
192 summary: commit #2
189
193
190 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
194 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
191 1 files changed, 534 insertions(+), 534 deletions(-)
195 1 files changed, 534 insertions(+), 534 deletions(-)
192
196
193 changeset: 3:348a9cbd6959
197 changeset: 3:348a9cbd6959
194 user: test
198 user: test
195 date: Thu Jan 01 00:00:00 1970 +0000
199 date: Thu Jan 01 00:00:00 1970 +0000
196 summary: commit #3
200 summary: commit #3
197
201
198 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
202 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
199 1 files changed, 534 insertions(+), 534 deletions(-)
203 1 files changed, 534 insertions(+), 534 deletions(-)
200
204
201
205
202 $ f -s .hg/store/data/*.d
206 $ f -s .hg/store/data/*.d
203 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=81370673
207 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=81370673
204 $ hg debugrevlog *
208 $ hg debugrevlog *
205 format : 1
209 format : 1
206 flags : generaldelta
210 flags : generaldelta
207
211
208 revisions : 5001
212 revisions : 5001
209 merges : 625 (12.50%)
213 merges : 625 (12.50%)
210 normal : 4376 (87.50%)
214 normal : 4376 (87.50%)
211 revisions : 5001
215 revisions : 5001
212 empty : 0 ( 0.00%)
216 empty : 0 ( 0.00%)
213 text : 0 (100.00%)
217 text : 0 (100.00%)
214 delta : 0 (100.00%)
218 delta : 0 (100.00%)
215 snapshot : 360 ( 7.20%)
219 snapshot : 360 ( 7.20%)
216 lvl-0 : 11 ( 0.22%)
220 lvl-0 : 11 ( 0.22%)
217 lvl-1 : 50 ( 1.00%) non-ancestor-bases: 41 (82.00%)
221 lvl-1 : 50 ( 1.00%) non-ancestor-bases: 41 (82.00%)
218 lvl-2 : 128 ( 2.56%) non-ancestor-bases: 119 (92.97%)
222 lvl-2 : 128 ( 2.56%) non-ancestor-bases: 119 (92.97%)
219 lvl-3 : 122 ( 2.44%) non-ancestor-bases: 111 (90.98%)
223 lvl-3 : 122 ( 2.44%) non-ancestor-bases: 111 (90.98%)
220 lvl-4 : 49 ( 0.98%) non-ancestor-bases: 46 (93.88%)
224 lvl-4 : 49 ( 0.98%) non-ancestor-bases: 46 (93.88%)
221 deltas : 4641 (92.80%)
225 deltas : 4641 (92.80%)
222 revision size : 81370673
226 revision size : 81370673
223 snapshot : 16282100 (20.01%)
227 snapshot : 16282100 (20.01%)
224 lvl-0 : 2188012 ( 2.69%)
228 lvl-0 : 2188012 ( 2.69%)
225 lvl-1 : 4848143 ( 5.96%)
229 lvl-1 : 4848143 ( 5.96%)
226 lvl-2 : 5366175 ( 6.59%)
230 lvl-2 : 5366175 ( 6.59%)
227 lvl-3 : 3085157 ( 3.79%)
231 lvl-3 : 3085157 ( 3.79%)
228 lvl-4 : 794613 ( 0.98%)
232 lvl-4 : 794613 ( 0.98%)
229 deltas : 65088573 (79.99%)
233 deltas : 65088573 (79.99%)
230
234
231 chunks : 5001
235 chunks : 5001
232 0x78 (x) : 5001 (100.00%)
236 0x78 (x) : 5001 (100.00%)
233 chunks size : 81370673
237 chunks size : 81370673
234 0x78 (x) : 81370673 (100.00%)
238 0x78 (x) : 81370673 (100.00%)
235
239
236
240
237 total-stored-content: 1 717 863 086 bytes
241 total-stored-content: 1 717 863 086 bytes
238
242
239 avg chain length : 8
243 avg chain length : 8
240 max chain length : 15
244 max chain length : 15
241 max chain reach : 18326506
245 max chain reach : 18326506
242 compression ratio : 21
246 compression ratio : 21
243
247
244 uncompressed data size (min/max/avg) : 339930 / 346471 / 343503
248 uncompressed data size (min/max/avg) : 339930 / 346471 / 343503
245 full revision size (min/max/avg) : 196682 / 201129 / 198910
249 full revision size (min/max/avg) : 196682 / 201129 / 198910
246 inter-snapshot size (min/max/avg) : 11620 / 172223 / 40384
250 inter-snapshot size (min/max/avg) : 11620 / 172223 / 40384
247 level-1 (min/max/avg) : 14329 / 172223 / 96962
251 level-1 (min/max/avg) : 14329 / 172223 / 96962
248 level-2 (min/max/avg) : 11664 / 86421 / 41923
252 level-2 (min/max/avg) : 11664 / 86421 / 41923
249 level-3 (min/max/avg) : 11620 / 42674 / 25288
253 level-3 (min/max/avg) : 11620 / 42674 / 25288
250 level-4 (min/max/avg) : 11631 / 21209 / 16216
254 level-4 (min/max/avg) : 11631 / 21209 / 16216
251 delta size (min/max/avg) : 10610 / 190651 / 14024
255 delta size (min/max/avg) : 10610 / 190651 / 14024
252
256
253 deltas against prev : 3916 (84.38%)
257 deltas against prev : 3916 (84.38%)
254 where prev = p1 : 3916 (100.00%)
258 where prev = p1 : 3916 (100.00%)
255 where prev = p2 : 0 ( 0.00%)
259 where prev = p2 : 0 ( 0.00%)
256 other-ancestor : 0 ( 0.00%)
260 other-ancestor : 0 ( 0.00%)
257 unrelated : 0 ( 0.00%)
261 unrelated : 0 ( 0.00%)
258 deltas against p1 : 667 (14.37%)
262 deltas against p1 : 667 (14.37%)
259 deltas against p2 : 58 ( 1.25%)
263 deltas against p2 : 58 ( 1.25%)
260 deltas against ancs : 0 ( 0.00%)
264 deltas against ancs : 0 ( 0.00%)
261 deltas against other : 0 ( 0.00%)
265 deltas against other : 0 ( 0.00%)
262
266
263
267
264 Test `debug-delta-find`
268 Test `debug-delta-find`
265 -----------------------
269 -----------------------
266
270
267 $ ls -1
271 $ ls -1
268 SPARSE-REVLOG-TEST-FILE
272 SPARSE-REVLOG-TEST-FILE
269 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
273 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
270 4996 4995 -1 11 3 4947 snap
274 4996 4995 -1 11 3 4947 snap
271 $ LAST_SNAP=`hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1| sed 's/^ \+//'| cut -d ' ' -f 1`
275 $ LAST_SNAP=`hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1| sed 's/^ \+//'| cut -d ' ' -f 1`
272 $ echo Last Snapshot: $LAST_SNAP
276 $ echo Last Snapshot: $LAST_SNAP
273 Last Snapshot: 4996
277 Last Snapshot: 4996
274 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP
278 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP
275 DBG-DELTAS-SEARCH: SEARCH rev=4996
279 DBG-DELTAS-SEARCH: SEARCH rev=4996
276 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
280 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
277 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
281 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
278 DBG-DELTAS-SEARCH: type=snapshot-3
282 DBG-DELTAS-SEARCH: type=snapshot-3
279 DBG-DELTAS-SEARCH: size=15153
283 DBG-DELTAS-SEARCH: size=15153
280 DBG-DELTAS-SEARCH: base=4958
284 DBG-DELTAS-SEARCH: base=4958
281 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
285 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
282 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
286 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
283 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
287 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
284 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
288 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
285 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
286 DBG-DELTAS-SEARCH: type=snapshot-2
290 DBG-DELTAS-SEARCH: type=snapshot-2
287 DBG-DELTAS-SEARCH: size=30977
291 DBG-DELTAS-SEARCH: size=30977
288 DBG-DELTAS-SEARCH: base=4947
292 DBG-DELTAS-SEARCH: base=4947
289 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
293 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
290 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
294 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
291 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
295 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
292 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
296 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
293 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
297 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
294 DBG-DELTAS-SEARCH: type=snapshot-1
298 DBG-DELTAS-SEARCH: type=snapshot-1
295 DBG-DELTAS-SEARCH: size=164878
299 DBG-DELTAS-SEARCH: size=164878
296 DBG-DELTAS-SEARCH: base=4667
300 DBG-DELTAS-SEARCH: base=4667
297 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
301 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
298 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
302 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
299 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
303 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
300 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
304 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
301 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
305 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
302 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
306 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
303 DBG-DELTAS-SEARCH: type=snapshot-0
307 DBG-DELTAS-SEARCH: type=snapshot-0
304 DBG-DELTAS-SEARCH: size=196699
308 DBG-DELTAS-SEARCH: size=196699
305 DBG-DELTAS-SEARCH: base=-1
309 DBG-DELTAS-SEARCH: base=-1
306 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
310 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
307 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
311 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
308 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
312 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
309 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
313 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
310 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
314 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
311 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
312 DBG-DELTAS-SEARCH: type=snapshot-2
316 DBG-DELTAS-SEARCH: type=snapshot-2
313 DBG-DELTAS-SEARCH: size=58198
317 DBG-DELTAS-SEARCH: size=58198
314 DBG-DELTAS-SEARCH: base=4947
318 DBG-DELTAS-SEARCH: base=4947
315 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
319 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
316 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
317 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
321 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
318 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
322 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
319
323
320 $ cat << EOF >>.hg/hgrc
324 $ cat << EOF >>.hg/hgrc
321 > [storage]
325 > [storage]
322 > revlog.optimize-delta-parent-choice = no
326 > revlog.optimize-delta-parent-choice = no
323 > revlog.reuse-external-delta = yes
327 > revlog.reuse-external-delta = yes
324 > EOF
328 > EOF
325
329
326 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --quiet
330 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --quiet
327 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
331 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
328 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source full
332 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source full
329 DBG-DELTAS-SEARCH: SEARCH rev=4996
333 DBG-DELTAS-SEARCH: SEARCH rev=4996
330 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
334 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
331 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
335 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
332 DBG-DELTAS-SEARCH: type=snapshot-3
336 DBG-DELTAS-SEARCH: type=snapshot-3
333 DBG-DELTAS-SEARCH: size=15153
337 DBG-DELTAS-SEARCH: size=15153
334 DBG-DELTAS-SEARCH: base=4958
338 DBG-DELTAS-SEARCH: base=4958
335 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
339 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
336 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
340 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
337 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
341 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
338 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
342 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
339 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
343 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
340 DBG-DELTAS-SEARCH: type=snapshot-2
344 DBG-DELTAS-SEARCH: type=snapshot-2
341 DBG-DELTAS-SEARCH: size=30977
345 DBG-DELTAS-SEARCH: size=30977
342 DBG-DELTAS-SEARCH: base=4947
346 DBG-DELTAS-SEARCH: base=4947
343 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
347 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
344 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
348 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
345 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
349 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
346 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
350 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
347 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
351 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
348 DBG-DELTAS-SEARCH: type=snapshot-1
352 DBG-DELTAS-SEARCH: type=snapshot-1
349 DBG-DELTAS-SEARCH: size=164878
353 DBG-DELTAS-SEARCH: size=164878
350 DBG-DELTAS-SEARCH: base=4667
354 DBG-DELTAS-SEARCH: base=4667
351 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
355 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
352 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
356 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
353 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
357 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
354 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
358 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
355 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
359 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
356 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
360 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
357 DBG-DELTAS-SEARCH: type=snapshot-0
361 DBG-DELTAS-SEARCH: type=snapshot-0
358 DBG-DELTAS-SEARCH: size=196699
362 DBG-DELTAS-SEARCH: size=196699
359 DBG-DELTAS-SEARCH: base=-1
363 DBG-DELTAS-SEARCH: base=-1
360 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
364 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
361 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
365 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
362 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
366 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
363 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
367 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
364 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
368 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
365 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
369 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
366 DBG-DELTAS-SEARCH: type=snapshot-2
370 DBG-DELTAS-SEARCH: type=snapshot-2
367 DBG-DELTAS-SEARCH: size=58198
371 DBG-DELTAS-SEARCH: size=58198
368 DBG-DELTAS-SEARCH: base=4947
372 DBG-DELTAS-SEARCH: base=4947
369 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
373 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
370 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
374 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
371 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
375 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
372 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
376 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
373 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source storage
377 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source storage
374 DBG-DELTAS-SEARCH: SEARCH rev=4996
378 DBG-DELTAS-SEARCH: SEARCH rev=4996
375 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
379 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - cached-delta
376 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
380 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
377 DBG-DELTAS-SEARCH: type=snapshot-1
381 DBG-DELTAS-SEARCH: type=snapshot-1
378 DBG-DELTAS-SEARCH: size=164878
382 DBG-DELTAS-SEARCH: size=164878
379 DBG-DELTAS-SEARCH: base=4667
383 DBG-DELTAS-SEARCH: base=4667
380 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
384 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
381 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
385 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
382 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
386 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
383 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=-1 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
387 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=1 - search-rounds=1 try-count=1 - delta-type=delta snap-depth=-1 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
384 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p1
388 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p1
385 DBG-DELTAS-SEARCH: SEARCH rev=4996
389 DBG-DELTAS-SEARCH: SEARCH rev=4996
386 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
390 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
387 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
391 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
388 DBG-DELTAS-SEARCH: type=snapshot-3
392 DBG-DELTAS-SEARCH: type=snapshot-3
389 DBG-DELTAS-SEARCH: size=15153
393 DBG-DELTAS-SEARCH: size=15153
390 DBG-DELTAS-SEARCH: base=4958
394 DBG-DELTAS-SEARCH: base=4958
391 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
395 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
392 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
396 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
393 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
397 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
394 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
398 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
395 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
399 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
396 DBG-DELTAS-SEARCH: type=snapshot-2
400 DBG-DELTAS-SEARCH: type=snapshot-2
397 DBG-DELTAS-SEARCH: size=30977
401 DBG-DELTAS-SEARCH: size=30977
398 DBG-DELTAS-SEARCH: base=4947
402 DBG-DELTAS-SEARCH: base=4947
399 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
403 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
400 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
404 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
401 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
405 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
402 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
406 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
403 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
407 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
404 DBG-DELTAS-SEARCH: type=snapshot-1
408 DBG-DELTAS-SEARCH: type=snapshot-1
405 DBG-DELTAS-SEARCH: size=164878
409 DBG-DELTAS-SEARCH: size=164878
406 DBG-DELTAS-SEARCH: base=4667
410 DBG-DELTAS-SEARCH: base=4667
407 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
411 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
408 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
412 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
409 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
413 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
410 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
414 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
411 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
415 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
412 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
416 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
413 DBG-DELTAS-SEARCH: type=snapshot-0
417 DBG-DELTAS-SEARCH: type=snapshot-0
414 DBG-DELTAS-SEARCH: size=196699
418 DBG-DELTAS-SEARCH: size=196699
415 DBG-DELTAS-SEARCH: base=-1
419 DBG-DELTAS-SEARCH: base=-1
416 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
420 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
417 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
421 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
418 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
422 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
419 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
423 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
420 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
424 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
421 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
425 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
422 DBG-DELTAS-SEARCH: type=snapshot-2
426 DBG-DELTAS-SEARCH: type=snapshot-2
423 DBG-DELTAS-SEARCH: size=58198
427 DBG-DELTAS-SEARCH: size=58198
424 DBG-DELTAS-SEARCH: base=4947
428 DBG-DELTAS-SEARCH: base=4947
425 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
429 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
426 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
430 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
427 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
431 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
428 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
432 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
429 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p2
433 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source p2
430 DBG-DELTAS-SEARCH: SEARCH rev=4996
434 DBG-DELTAS-SEARCH: SEARCH rev=4996
431 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
435 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
432 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
436 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
433 DBG-DELTAS-SEARCH: type=snapshot-3
437 DBG-DELTAS-SEARCH: type=snapshot-3
434 DBG-DELTAS-SEARCH: size=15153
438 DBG-DELTAS-SEARCH: size=15153
435 DBG-DELTAS-SEARCH: base=4958
439 DBG-DELTAS-SEARCH: base=4958
436 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
440 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
437 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
441 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
438 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
442 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
439 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
443 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
440 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
444 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
441 DBG-DELTAS-SEARCH: type=snapshot-2
445 DBG-DELTAS-SEARCH: type=snapshot-2
442 DBG-DELTAS-SEARCH: size=30977
446 DBG-DELTAS-SEARCH: size=30977
443 DBG-DELTAS-SEARCH: base=4947
447 DBG-DELTAS-SEARCH: base=4947
444 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
448 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
445 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
449 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
446 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
450 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
447 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
451 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
448 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
452 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
449 DBG-DELTAS-SEARCH: type=snapshot-1
453 DBG-DELTAS-SEARCH: type=snapshot-1
450 DBG-DELTAS-SEARCH: size=164878
454 DBG-DELTAS-SEARCH: size=164878
451 DBG-DELTAS-SEARCH: base=4667
455 DBG-DELTAS-SEARCH: base=4667
452 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
456 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
453 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
457 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
454 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
458 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
455 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
459 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
456 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
460 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
457 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
461 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
458 DBG-DELTAS-SEARCH: type=snapshot-0
462 DBG-DELTAS-SEARCH: type=snapshot-0
459 DBG-DELTAS-SEARCH: size=196699
463 DBG-DELTAS-SEARCH: size=196699
460 DBG-DELTAS-SEARCH: base=-1
464 DBG-DELTAS-SEARCH: base=-1
461 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
465 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
462 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
466 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
463 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
467 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
464 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
468 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
465 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
469 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
466 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
470 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
467 DBG-DELTAS-SEARCH: type=snapshot-2
471 DBG-DELTAS-SEARCH: type=snapshot-2
468 DBG-DELTAS-SEARCH: size=58198
472 DBG-DELTAS-SEARCH: size=58198
469 DBG-DELTAS-SEARCH: base=4947
473 DBG-DELTAS-SEARCH: base=4947
470 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
474 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
471 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
475 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
472 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
476 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
473 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
477 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
474 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source prev
478 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE $LAST_SNAP --source prev
475 DBG-DELTAS-SEARCH: SEARCH rev=4996
479 DBG-DELTAS-SEARCH: SEARCH rev=4996
476 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
480 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
477 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
481 DBG-DELTAS-SEARCH: CANDIDATE: rev=4964
478 DBG-DELTAS-SEARCH: type=snapshot-3
482 DBG-DELTAS-SEARCH: type=snapshot-3
479 DBG-DELTAS-SEARCH: size=15153
483 DBG-DELTAS-SEARCH: size=15153
480 DBG-DELTAS-SEARCH: base=4958
484 DBG-DELTAS-SEARCH: base=4958
481 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
485 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
482 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
486 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
483 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
487 DBG-DELTAS-SEARCH: DELTA: length=36297 (BAD)
484 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
488 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
485 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
489 DBG-DELTAS-SEARCH: CANDIDATE: rev=4958
486 DBG-DELTAS-SEARCH: type=snapshot-2
490 DBG-DELTAS-SEARCH: type=snapshot-2
487 DBG-DELTAS-SEARCH: size=30977
491 DBG-DELTAS-SEARCH: size=30977
488 DBG-DELTAS-SEARCH: base=4947
492 DBG-DELTAS-SEARCH: base=4947
489 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
493 DBG-DELTAS-SEARCH: uncompressed-delta-size=61571
490 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
494 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
491 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
495 DBG-DELTAS-SEARCH: DELTA: length=36578 (BAD)
492 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
496 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - search-down
493 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
497 DBG-DELTAS-SEARCH: CANDIDATE: rev=4947
494 DBG-DELTAS-SEARCH: type=snapshot-1
498 DBG-DELTAS-SEARCH: type=snapshot-1
495 DBG-DELTAS-SEARCH: size=164878
499 DBG-DELTAS-SEARCH: size=164878
496 DBG-DELTAS-SEARCH: base=4667
500 DBG-DELTAS-SEARCH: base=4667
497 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
501 DBG-DELTAS-SEARCH: uncompressed-delta-size=87938
498 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
502 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
499 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
503 DBG-DELTAS-SEARCH: DELTA: length=52101 (GOOD)
500 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
504 DBG-DELTAS-SEARCH: ROUND #4 - 1 candidates - refine-down
501 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
505 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
502 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
506 DBG-DELTAS-SEARCH: CANDIDATE: rev=4667
503 DBG-DELTAS-SEARCH: type=snapshot-0
507 DBG-DELTAS-SEARCH: type=snapshot-0
504 DBG-DELTAS-SEARCH: size=196699
508 DBG-DELTAS-SEARCH: size=196699
505 DBG-DELTAS-SEARCH: base=-1
509 DBG-DELTAS-SEARCH: base=-1
506 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
510 DBG-DELTAS-SEARCH: uncompressed-delta-size=281309
507 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
511 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
508 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
512 DBG-DELTAS-SEARCH: DELTA: length=165408 (GOOD)
509 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
513 DBG-DELTAS-SEARCH: ROUND #5 - 1 candidates - refine-up
510 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
514 DBG-DELTAS-SEARCH: CONTENDER: rev=4947 - length=52101
511 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
515 DBG-DELTAS-SEARCH: CANDIDATE: rev=4954
512 DBG-DELTAS-SEARCH: type=snapshot-2
516 DBG-DELTAS-SEARCH: type=snapshot-2
513 DBG-DELTAS-SEARCH: size=58198
517 DBG-DELTAS-SEARCH: size=58198
514 DBG-DELTAS-SEARCH: base=4947
518 DBG-DELTAS-SEARCH: base=4947
515 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
519 DBG-DELTAS-SEARCH: uncompressed-delta-size=92195
516 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
520 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
517 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
521 DBG-DELTAS-SEARCH: DELTA: length=54601 (BAD)
518 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
522 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4996: delta-base=4947 is-cached=0 - search-rounds=5 try-count=5 - delta-type=snapshot snap-depth=2 - p1-chain-length=15 p2-chain-length=-1 - duration=*.?????? (glob)
519
523
520 $ cd ..
524 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now