@@ -105,6 +105,164 b' def writechunks(ui, chunks, filename, vf'
                 os.unlink(cleanup)
 
 
+def _dbg_ubdl_line(
+    ui,
+    indent,
+    key,
+    base_value=None,
+    percentage_base=None,
+    percentage_key=None,
+):
+    """Print one line of display_unbundle_debug_info"""
+    line = b"DEBUG-UNBUNDLING: "
+    line += b' ' * (2 * indent)
+    key += b":"
+    padding = b''
+    if base_value is not None:
+        assert len(key) + 1 + (2 * indent) <= _KEY_PART_WIDTH
+        line += key.ljust(_KEY_PART_WIDTH - (2 * indent))
+        if isinstance(base_value, float):
+            line += b"%14.3f seconds" % base_value
+        else:
+            line += b"%10d" % base_value
+            padding = b' '
+    else:
+        line += key
+
+    if percentage_base is not None:
+        line += padding
+        padding = b''
+        assert base_value is not None
+        percentage = base_value * 100 // percentage_base
+        if percentage_key is not None:
+            line += b" (%3d%% of %s)" % (
+                percentage,
+                percentage_key,
+            )
+        else:
+            line += b" (%3d%%)" % percentage
+
+    line += b'\n'
+    ui.write_err(line)
+
+
+def _sumf(items):
+    # python < 3.8 does not support a `start=0.0` argument to sum
+    # So we have to cheat a bit until we drop support for those versions
+    if not items:
+        return 0.0
+    return sum(items)
+
+
+def display_unbundle_debug_info(ui, debug_info):
+    """display an unbundling report from debug information"""
+    cl_info = []
+    mn_info = []
+    fl_info = []
+    _dispatch = [
+        (b'CHANGELOG:', cl_info),
+        (b'MANIFESTLOG:', mn_info),
+        (b'FILELOG:', fl_info),
+    ]
+    for e in debug_info:
+        for prefix, info in _dispatch:
+            if e["target-revlog"].startswith(prefix):
+                info.append(e)
+                break
+        else:
+            assert False, 'unreachable'
+    each_info = [
+        (b'changelog', cl_info),
+        (b'manifests', mn_info),
+        (b'files', fl_info),
+    ]
+
+    # General Revision Counts
+    _dbg_ubdl_line(ui, 0, b'revisions', len(debug_info))
+    for key, info in each_info:
+        if not info:
+            continue
+        _dbg_ubdl_line(ui, 1, key, len(info), len(debug_info))
+
+    # General Time spent
+    all_durations = [e['duration'] for e in debug_info]
+    all_durations.sort()
+    total_duration = _sumf(all_durations)
+    _dbg_ubdl_line(ui, 0, b'total-time', total_duration)
+
+    for key, info in each_info:
+        if not info:
+            continue
+        durations = [e['duration'] for e in info]
+        durations.sort()
+        _dbg_ubdl_line(ui, 1, key, _sumf(durations), total_duration)
+
+    # Count and cache reuse per delta types
+    each_types = {}
+    for key, info in each_info:
+        each_types[key] = types = {
+            b'full': 0,
+            b'full-cached': 0,
+            b'snapshot': 0,
+            b'snapshot-cached': 0,
+            b'delta': 0,
+            b'delta-cached': 0,
+            b'unknown': 0,
+            b'unknown-cached': 0,
+        }
+        for e in info:
+            types[e['type']] += 1
+            if e['using-cached-base']:
+                types[e['type'] + b'-cached'] += 1
+
+    EXPECTED_TYPES = (b'full', b'snapshot', b'delta', b'unknown')
+    if debug_info:
+        _dbg_ubdl_line(ui, 0, b'type-count')
+        for key, info in each_info:
+            if info:
+                _dbg_ubdl_line(ui, 1, key)
+            t = each_types[key]
+            for tn in EXPECTED_TYPES:
+                if t[tn]:
+                    tc = tn + b'-cached'
+                    _dbg_ubdl_line(ui, 2, tn, t[tn])
+                    _dbg_ubdl_line(ui, 3, b'cached', t[tc], t[tn])
+
+    # time spent per delta type and cache reuse
+    each_type_time = {}
+    for key, info in each_info:
+        each_type_time[key] = t = {
+            b'full': [],
+            b'full-cached': [],
+            b'snapshot': [],
+            b'snapshot-cached': [],
+            b'delta': [],
+            b'delta-cached': [],
+            b'unknown': [],
+            b'unknown-cached': [],
+        }
+        for e in info:
+            t[e['type']].append(e['duration'])
+            if e['using-cached-base']:
+                t[e['type'] + b'-cached'].append(e['duration'])
+        for t_key, value in list(t.items()):
+            value.sort()
+            t[t_key] = _sumf(value)
+
+    if debug_info:
+        _dbg_ubdl_line(ui, 0, b'type-time')
+        for key, info in each_info:
+            if info:
+                _dbg_ubdl_line(ui, 1, key)
+            t = each_type_time[key]
+            td = total_duration  # to save space on the next lines
+            for tn in EXPECTED_TYPES:
+                if t[tn]:
+                    tc = tn + b'-cached'
+                    _dbg_ubdl_line(ui, 2, tn, t[tn], td, b"total")
+                    _dbg_ubdl_line(ui, 3, b'cached', t[tc], td, b"total")
+
+
 class cg1unpacker:
     """Unpacker for cg1 changegroup streams.
 
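For orientation: the report code above only consumes a list of per-revision timing dicts that the delta-application code appends (see the deltacomputer hunks further down in this diff). The short standalone sketch below is not part of the patch; it mimics the grouping and summing performed by display_unbundle_debug_info on a hand-made debug_info list, assuming only the record keys visible in this diff (target-revlog, type, using-cached-base, duration) and invented sample values.

# Standalone illustration (not part of the patch): aggregate a hand-made
# debug_info list the way display_unbundle_debug_info does.
sample_debug_info = [
    {'target-revlog': b'CHANGELOG:', 'type': b'full',
     'using-cached-base': False, 'duration': 0.002},
    {'target-revlog': b'MANIFESTLOG:', 'type': b'delta',
     'using-cached-base': True, 'duration': 0.001},
    {'target-revlog': b'FILELOG:foo.txt:', 'type': b'full',
     'using-cached-base': False, 'duration': 0.003},
]

# dispatch each record to its revlog category by prefix of 'target-revlog'
buckets = {b'CHANGELOG:': [], b'MANIFESTLOG:': [], b'FILELOG:': []}
for record in sample_debug_info:
    for prefix, bucket in buckets.items():
        if record['target-revlog'].startswith(prefix):
            bucket.append(record)
            break

# per-category totals: revision count, cached-base reuse, time share
total = sum(r['duration'] for r in sample_debug_info)
for prefix, bucket in buckets.items():
    spent = sum(r['duration'] for r in bucket)
    reused = sum(1 for r in bucket if r['using-cached-base'])
    print(prefix, len(bucket), 'revisions,', reused, 'cached base(s),',
          '%.3fs (%d%% of total)' % (spent, spent * 100 // total))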
@@ -254,7 +412,15 b' class cg1unpacker:'
             pos = next
         yield closechunk()
 
-    def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+    def _unpackmanifests(
+        self,
+        repo,
+        revmap,
+        trp,
+        prog,
+        addrevisioncb=None,
+        debug_info=None,
+    ):
         self.callback = prog.increment
         # no need to check for empty manifest group here:
         # if the result of the merge of 1 and 2 is the same in 3 and 4,
@@ -263,7 +429,13 b' class cg1unpacker:'
         self.manifestheader()
         deltas = self.deltaiter()
         storage = repo.manifestlog.getstorage(b'')
-        storage.addgroup(deltas, revmap, trp, addrevisioncb=addrevisioncb)
+        storage.addgroup(
+            deltas,
+            revmap,
+            trp,
+            addrevisioncb=addrevisioncb,
+            debug_info=debug_info,
+        )
         prog.complete()
         self.callback = None
 
@@ -292,6 +464,10 b' class cg1unpacker:'
         """
         repo = repo.unfiltered()
 
+        debug_info = None
+        if repo.ui.configbool(b'debug', b'unbundling-stats'):
+            debug_info = []
+
         # Only useful if we're adding sidedata categories. If both peers have
         # the same categories, then we simply don't do anything.
         adding_sidedata = (
@@ -366,6 +542,7 b' class cg1unpacker:'
                 alwayscache=True,
                 addrevisioncb=onchangelog,
                 duplicaterevisioncb=ondupchangelog,
+                debug_info=debug_info,
             ):
                 repo.ui.develwarn(
                     b'applied empty changelog from changegroup',
@@ -413,6 +590,7 b' class cg1unpacker:'
                 trp,
                 progress,
                 addrevisioncb=on_manifest_rev,
+                debug_info=debug_info,
             )
 
             needfiles = {}
@@ -449,6 +627,7 b' class cg1unpacker:'
                 efiles,
                 needfiles,
                 addrevisioncb=on_filelog_rev,
+                debug_info=debug_info,
             )
 
             if sidedata_helpers:
@@ -567,6 +746,8 b' class cg1unpacker:'
                     b'changegroup-runhooks-%020i' % clstart,
                     lambda tr: repo._afterlock(runhooks),
                 )
+            if debug_info is not None:
+                display_unbundle_debug_info(repo.ui, debug_info)
         finally:
             repo.ui.flush()
         # never return 0 here:
@@ -626,9 +807,22 b' class cg3unpacker(cg2unpacker):'
             protocol_flags = 0
         return node, p1, p2, deltabase, cs, flags, protocol_flags
 
-    def _unpackmanifests(self, repo, revmap, trp, prog, addrevisioncb=None):
+    def _unpackmanifests(
+        self,
+        repo,
+        revmap,
+        trp,
+        prog,
+        addrevisioncb=None,
+        debug_info=None,
+    ):
         super(cg3unpacker, self)._unpackmanifests(
-            repo, revmap, trp, prog, addrevisioncb=addrevisioncb
+            repo,
+            revmap,
+            trp,
+            prog,
+            addrevisioncb=addrevisioncb,
+            debug_info=debug_info,
         )
         for chunkdata in iter(self.filelogheader, {}):
             # If we get here, there are directory manifests in the changegroup
@@ -636,7 +830,11 b' class cg3unpacker(cg2unpacker):'
             repo.ui.debug(b"adding %s revisions\n" % d)
             deltas = self.deltaiter()
             if not repo.manifestlog.getstorage(d).addgroup(
-                deltas, revmap, trp, addrevisioncb=addrevisioncb
+                deltas,
+                revmap,
+                trp,
+                addrevisioncb=addrevisioncb,
+                debug_info=debug_info,
             ):
                 raise error.Abort(_(b"received dir revlog group is empty"))
 
@@ -2173,6 +2371,7 b' def _addchangegroupfiles('
     expectedfiles,
     needfiles,
     addrevisioncb=None,
+    debug_info=None,
 ):
     revisions = 0
     files = 0
@@ -2193,6 +2392,7 b' def _addchangegroupfiles('
             revmap,
             trp,
             addrevisioncb=addrevisioncb,
+            debug_info=debug_info,
         )
         if not added:
             raise error.Abort(_(b"received file revlog group is empty"))
@@ -594,6 +594,12 b' coreconfigitem('
     b'bundling-stats',
     default=False,
 )
+# display extra information about the unbundling process
+coreconfigitem(
+    b'debug',
+    b'unbundling-stats',
+    default=False,
+)
 coreconfigitem(
     b'defaults',
     b'.*',
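The hunk above registers the new knob next to the existing debug.bundling-stats item. It defaults to off and can be enabled per invocation with --config debug.unbundling-stats=yes (as the new test at the end of this series does) or persistently by putting unbundling-stats = yes in the [debug] section of a configuration file.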
@@ -153,6 +153,7 b' class filelog:'
         addrevisioncb=None,
         duplicaterevisioncb=None,
         maybemissingparents=False,
+        debug_info=None,
     ):
         if maybemissingparents:
             raise error.Abort(
@@ -173,6 +174,7 b' class filelog:'
             transaction,
             addrevisioncb=addrevisioncb,
             duplicaterevisioncb=duplicaterevisioncb,
+            debug_info=debug_info,
         )
 
     def getstrippoint(self, minlink):
@@ -1856,6 +1856,7 b' class manifestrevlog:'
         alwayscache=False,
         addrevisioncb=None,
         duplicaterevisioncb=None,
+        debug_info=None,
     ):
         return self._revlog.addgroup(
             deltas,
@@ -1864,6 +1865,7 b' class manifestrevlog:'
             alwayscache=alwayscache,
             addrevisioncb=addrevisioncb,
             duplicaterevisioncb=duplicaterevisioncb,
+            debug_info=debug_info,
         )
 
     def rawsize(self, rev):
@@ -2640,6 +2640,7 b' class revlog:'
         alwayscache=False,
         addrevisioncb=None,
         duplicaterevisioncb=None,
+        debug_info=None,
     ):
         """
         add a delta group
@@ -2665,6 +2666,7 b' class revlog:'
         deltacomputer = deltautil.deltacomputer(
             self,
             write_debug=write_debug,
+            debug_info=debug_info,
         )
         # loop through our set of deltas
         for data in deltas:
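Taken together, the hunks so far only thread the new debug_info list through the call chain: cg1unpacker.apply() creates it when debug.unbundling-stats is set, every addgroup() call for the changelog, manifests and filelogs forwards it, and revlog.addgroup() finally hands it to deltautil.deltacomputer, which is where the per-revision records are appended (next hunks).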
@@ -674,7 +674,12 b' def _candidategroups(revlog, textlen, p1'
     deltas_limit = textlen * LIMIT_DELTA2TEXT
 
     tested = {nullrev}
-    candidates = _refinedgroups(revlog, p1, p2, cachedelta)
+    candidates = _refinedgroups(
+        revlog,
+        p1,
+        p2,
+        cachedelta,
+    )
     while True:
         temptative = candidates.send(good)
         if temptative is None:
@@ -703,6 +708,7 b' def _candidategroups(revlog, textlen, p1'
             # no delta for rawtext-changing revs (see "candelta" for why)
             if revlog.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS:
                 continue
+
             # If we reach here, we are about to build and test a delta.
             # The delta building process will compute the chaininfo in all
             # case, since that computation is cached, it is fine to access it
@@ -768,15 +774,28 b' def _refinedgroups(revlog, p1, p2, cache'
     # This logic only applies to general delta repositories and can be disabled
     # through configuration. Disabling reuse source delta is useful when
     # we want to make sure we recomputed "optimal" deltas.
+    debug_info = None
     if cachedelta and revlog._generaldelta and revlog._lazydeltabase:
         # Assume what we received from the server is a good choice
         # build delta will reuse the cache
+        if debug_info is not None:
+            debug_info['cached-delta.tested'] += 1
         good = yield (cachedelta[0],)
         if good is not None:
+            if debug_info is not None:
+                debug_info['cached-delta.accepted'] += 1
             yield None
             return
+    # XXX cache me higher
     snapshots = collections.defaultdict(list)
-    for candidates in _rawgroups(revlog, p1, p2, cachedelta, snapshots):
+    groups = _rawgroups(
+        revlog,
+        p1,
+        p2,
+        cachedelta,
+        snapshots,
+    )
+    for candidates in groups:
         good = yield candidates
         if good is not None:
             break
@@ -805,7 +824,10 b' def _refinedgroups(revlog, p1, p2, cache'
             children = tuple(sorted(c for c in snapshots[good]))
             good = yield children
 
-    # we have found nothing
+    if debug_info is not None:
+        if good is None:
+            debug_info['no-solution'] += 1
+
     yield None
 
 
@@ -931,10 +953,17 b' def _rawgroups(revlog, p1, p2, cachedelt'
 
 
 class deltacomputer:
-    def __init__(self, revlog, write_debug=None, debug_search=False):
+    def __init__(
+        self,
+        revlog,
+        write_debug=None,
+        debug_search=False,
+        debug_info=None,
+    ):
         self.revlog = revlog
         self._write_debug = write_debug
         self._debug_search = debug_search
+        self._debug_info = debug_info
 
     def buildtext(self, revinfo, fh):
         """Builds a fulltext version of a revision
@@ -1103,11 +1132,14 b' class deltacomputer:'
         if revinfo.flags & REVIDX_RAWTEXT_CHANGING_FLAGS:
             return self._fullsnapshotinfo(fh, revinfo, target_rev)
 
-        if self._write_debug is not None:
+        gather_debug = (
+            self._write_debug is not None or self._debug_info is not None
+        )
+        debug_search = self._write_debug is not None and self._debug_search
+
+        if gather_debug:
             start = util.timer()
 
-        debug_search = self._write_debug is not None and self._debug_search
-
         # count the number of different delta we tried (for debug purpose)
         dbg_try_count = 0
         # count the number of "search round" we did. (for debug purpose)
@@ -1122,7 +1154,7 b' class deltacomputer:'
         deltainfo = None
         p1r, p2r = revlog.rev(p1), revlog.rev(p2)
 
-        if self._write_debug is not None:
+        if gather_debug:
             if p1r != nullrev:
                 p1_chain_len = revlog._chaininfo(p1r)[0]
             else:
1250 | else: |
|
1282 | else: | |
1251 | dbg_type = b"delta" |
|
1283 | dbg_type = b"delta" | |
1252 |
|
1284 | |||
1253 |
if |
|
1285 | if gather_debug: | |
1254 | end = util.timer() |
|
1286 | end = util.timer() | |
1255 | assert deltainfo is not None # please pytype |
|
|||
1256 | used_cached = ( |
|
1287 | used_cached = ( | |
1257 | cachedelta is not None |
|
1288 | cachedelta is not None | |
1258 | and dbg_try_rounds == 1 |
|
1289 | and dbg_try_rounds == 1 | |
@@ -1262,7 +1293,7 b' class deltacomputer:'
             dbg = {
                 'duration': end - start,
                 'revision': target_rev,
-                'delta-base': deltainfo.base,
+                'delta-base': deltainfo.base,  # pytype: disable=attribute-error
                 'search_round_count': dbg_try_rounds,
                 'using-cached-base': used_cached,
                 'delta_try_count': dbg_try_count,
@@ -1294,6 +1325,10 b' class deltacomputer:'
                 target_revlog += b'%s:' % target_key
             dbg['target-revlog'] = target_revlog
 
+        if self._debug_info is not None:
+            self._debug_info.append(dbg)
+
+        if self._write_debug is not None:
             msg = (
                 b"DBG-DELTAS:"
                 b" %-12s"
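The deltacomputer changes above give the per-revision record a second consumer: besides the existing write_debug callback that prints DBG-DELTAS lines, the same dict can now be appended to a debug_info list for later aggregation, and gather_debug becomes true as soon as either sink is configured. Below is a minimal, self-contained sketch of that dual-sink pattern; it is illustrative only (the class name, the fake "search" and the record layout are stand-ins, not Mercurial APIs).

# Minimal sketch of the gather_debug / dual-sink pattern used by
# deltacomputer above.  Everything here is invented for illustration.
import time


class TinyDeltaComputer:
    def __init__(self, write_debug=None, debug_info=None):
        self._write_debug = write_debug  # callback for per-revision lines
        self._debug_info = debug_info    # list collecting records for a report

    def finddeltainfo(self, target_rev):
        # gather timing as soon as either debug sink is configured
        gather_debug = (
            self._write_debug is not None or self._debug_info is not None
        )
        if gather_debug:
            start = time.monotonic()
        base = target_rev - 1  # stand-in for the real delta search
        if gather_debug:
            dbg = {
                'revision': target_rev,
                'delta-base': base,
                'duration': time.monotonic() - start,
            }
            if self._debug_info is not None:
                self._debug_info.append(dbg)
            if self._write_debug is not None:
                self._write_debug(
                    b"DBG-DELTAS: rev=%d base=%d\n" % (target_rev, base)
                )
        return base


stats = []
TinyDeltaComputer(debug_info=stats).finddeltainfo(5)
print(stats)  # one record, ready to be aggregated into a report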
@@ -1081,3 +1081,47 b' Test the debug output when applying delt'
   new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
   (run 'hg update' to get a working copy)
 
+
+Test the debug statistics when applying a bundle
+------------------------------------------------
+
+  $ hg init bar
+  $ hg -R bar unbundle ./default.hg --config debug.unbundling-stats=yes
+  adding changesets
+  adding manifests
+  adding file changes
+  DEBUG-UNBUNDLING: revisions: 9
+  DEBUG-UNBUNDLING:   changelog: 3 ( 33%)
+  DEBUG-UNBUNDLING:   manifests: 3 ( 33%)
+  DEBUG-UNBUNDLING:   files: 3 ( 33%)
+  DEBUG-UNBUNDLING: total-time: ?????????????? seconds (glob)
+  DEBUG-UNBUNDLING:   changelog: ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING:   manifests: ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING:   files: ?????????????? seconds (???%) (glob)
+  DEBUG-UNBUNDLING: type-count:
+  DEBUG-UNBUNDLING:   changelog:
+  DEBUG-UNBUNDLING:     full: 3
+  DEBUG-UNBUNDLING:       cached: 0 (  0%)
+  DEBUG-UNBUNDLING:   manifests:
+  DEBUG-UNBUNDLING:     full: 1
+  DEBUG-UNBUNDLING:       cached: 0 (  0%)
+  DEBUG-UNBUNDLING:     delta: 2
+  DEBUG-UNBUNDLING:       cached: 2 (100%)
+  DEBUG-UNBUNDLING:   files:
+  DEBUG-UNBUNDLING:     full: 3
+  DEBUG-UNBUNDLING:       cached: 0 (  0%)
+  DEBUG-UNBUNDLING: type-time:
+  DEBUG-UNBUNDLING:   changelog:
+  DEBUG-UNBUNDLING:     full: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:   manifests:
+  DEBUG-UNBUNDLING:     full: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:     delta: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:   files:
+  DEBUG-UNBUNDLING:     full: ?????????????? seconds (???% of total) (glob)
+  DEBUG-UNBUNDLING:       cached: ?????????????? seconds (???% of total) (glob)
+  added 3 changesets with 3 changes to 3 files
+  new changesets 4fe08cd4693e:4652c276ac4f (3 drafts)
+  (run 'hg update' to get a working copy)