Show More
@@ -20,7 +20,10 b' Configurations' | |||
|
20 | 20 | |
|
21 | 21 | ``profile-benchmark`` |
|
22 | 22 | Enable profiling for the benchmarked section. |
|
23 | (The first iteration is benchmarked) | |

23 | (by default, the first iteration is benchmarked) | |
|
24 | ||
|
25 | ``profiled-runs`` | |
|
26 | list of iteration to profile (starting from 0) | |
|
24 | 27 | |
|
25 | 28 | ``run-limits`` |
|
26 | 29 | Control the number of runs each benchmark will perform. The option value |
@@ -318,6 +321,11 b' try:' | |||
|
318 | 321 | ) |
|
319 | 322 | configitem( |
|
320 | 323 | b'perf', |
|
324 | b'profiled-runs', | |
|
325 | default=mercurial.configitems.dynamicdefault, | |
|
326 | ) | |
|
327 | configitem( | |
|
328 | b'perf', | |
|
321 | 329 | b'run-limits', |
|
322 | 330 | default=mercurial.configitems.dynamicdefault, |
|
323 | 331 | experimental=True, |
@@ -354,7 +362,7 b' except TypeError:' | |||
|
354 | 362 | ) |
|
355 | 363 | configitem( |
|
356 | 364 | b'perf', |
|
357 | b'profile-benchmark', | |

365 | b'profiled-runs', | |
|
358 | 366 | default=mercurial.configitems.dynamicdefault, |
|
359 | 367 | ) |
|
360 | 368 | configitem( |
@@ -491,9 +499,12 b' def gettimer(ui, opts=None):' | |||
|
491 | 499 | limits = DEFAULTLIMITS |
|
492 | 500 | |
|
493 | 501 | profiler = None |
|
502 | profiled_runs = set() | |
|
494 | 503 | if profiling is not None: |
|
495 | 504 | if ui.configbool(b"perf", b"profile-benchmark", False): |
|
496 | profiler = profiling.profile(ui) | |
|
505 | profiler = lambda: profiling.profile(ui) | |
|
506 | for run in ui.configlist(b"perf", b"profiled-runs", [0]): | |
|
507 | profiled_runs.add(int(run)) | |
|
497 | 508 | |
|
498 | 509 | prerun = getint(ui, b"perf", b"pre-run", 0) |
|
499 | 510 | t = functools.partial( |
@@ -503,6 +514,7 b' def gettimer(ui, opts=None):' | |||
|
503 | 514 | limits=limits, |
|
504 | 515 | prerun=prerun, |
|
505 | 516 | profiler=profiler, |
|
517 | profiled_runs=profiled_runs, | |
|
506 | 518 | ) |
|
507 | 519 | return t, fm |
|
508 | 520 | |
@@ -547,13 +559,14 b' def _timer(' | |||
|
547 | 559 | limits=DEFAULTLIMITS, |
|
548 | 560 | prerun=0, |
|
549 | 561 | profiler=None, |
|
562 | profiled_runs=(0,), | |
|
550 | 563 | ): |
|
551 | 564 | gc.collect() |
|
552 | 565 | results = [] |
|
553 | 566 | begin = util.timer() |
|
554 | 567 | count = 0 |
|
555 | 568 | if profiler is None: |
|
556 | profiler = NOOPCTX | |
|
569 | profiler = lambda: NOOPCTX | |
|
557 | 570 | for i in range(prerun): |
|
558 | 571 | if setup is not None: |
|
559 | 572 | setup() |
@@ -561,13 +574,16 b' def _timer(' | |||
|
561 | 574 | func() |
|
562 | 575 | keepgoing = True |
|
563 | 576 | while keepgoing: |
|
577 | if count in profiled_runs: | |
|
578 | prof = profiler() | |
|
579 | else: | |
|
580 | prof = NOOPCTX | |
|
564 | 581 | if setup is not None: |
|
565 | 582 | setup() |
|
566 | 583 | with context(): |
|
567 | with profiler: | |

584 | with prof: | |
|
568 | 585 | with timeone() as item: |
|
569 | 586 | r = func() |
|
570 | profiler = NOOPCTX | |
|
571 | 587 | count += 1 |
|
572 | 588 | results.append(item[0]) |
|
573 | 589 | cstop = util.timer() |
@@ -59,8 +59,11 b' perfstatus' | |||
|
59 | 59 | number of run to perform before starting measurement. |
|
60 | 60 | |
|
61 | 61 | "profile-benchmark" |
|
62 | Enable profiling for the benchmarked section. (The first iteration is | |

63 | benchmarked) | |
|
62 | Enable profiling for the benchmarked section. (by default, the first | |
|
63 | iteration is benchmarked) | |
|
64 | ||
|
65 | "profiled-runs" | |
|
66 | list of iteration to profile (starting from 0) | |
|
64 | 67 | |
|
65 | 68 | "run-limits" |
|
66 | 69 | Control the number of runs each benchmark will perform. The option value |
General Comments 0
You need to be logged in to leave comments.
Login now