Show More
@@ -15,6 +15,7 b' import os' | |||||
15 | import sys |
|
15 | import sys | |
16 | import time |
|
16 | import time | |
17 | import timeit |
|
17 | import timeit | |
|
18 | import math | |||
18 | from pdb import Restart |
|
19 | from pdb import Restart | |
19 |
|
20 | |||
20 | # cProfile was added in Python2.5 |
|
21 | # cProfile was added in Python2.5 | |
@@ -71,22 +72,26 b' class TimeitResult(object):' | |||||
71 |
|
72 | |||
72 | """ |
|
73 | """ | |
73 |
|
74 | |||
74 |
def __init__(self, loops, repeat, |
|
75 | def __init__(self, loops, repeat, average, stdev, all_runs, compile_time, precision): | |
75 | self.loops = loops |
|
76 | self.loops = loops | |
76 | self.repeat = repeat |
|
77 | self.repeat = repeat | |
77 |
self. |
|
78 | self.average = average | |
78 |
self. |
|
79 | self.stdev = stdev | |
79 | self.all_runs = all_runs |
|
80 | self.all_runs = all_runs | |
80 | self.compile_time = compile_time |
|
81 | self.compile_time = compile_time | |
81 | self._precision = precision |
|
82 | self._precision = precision | |
82 |
|
83 | |||
83 | def _repr_pretty_(self, p , cycle): |
|
84 | def _repr_pretty_(self, p , cycle): | |
84 |
|
|
85 | if self.loops == 1: # No s at "loops" if only one loop | |
85 |
unic = |
|
86 | unic = (u"%s loop, average of %d: %s +- %s per loop (using standard deviation)" | |
86 |
|
|
87 | % (self.loops, self.repeat, | |
|
88 | _format_time(self.average, self._precision), | |||
|
89 | _format_time(self.stdev, self._precision))) | |||
87 |
|
|
90 | else: | |
88 |
|
|
91 | unic = (u"%s loops, average of %d: %s +- %s per loop (using standard deviation)" | |
89 |
|
|
92 | % (self.loops, self.repeat, | |
|
93 | _format_time(self.average, self._precision), | |||
|
94 | _format_time(self.stdev, self._precision))) | |||
90 |
|
|
95 | p.text(u'<TimeitResult : '+unic+u'>') | |
91 |
|
96 | |||
92 |
|
97 | |||
@@ -950,20 +955,20 b' python-profiler package from non-free.""")' | |||||
950 | :: |
|
955 | :: | |
951 |
|
956 | |||
952 | In [1]: %timeit pass |
|
957 | In [1]: %timeit pass | |
953 |
10000000 loops, |
|
958 | 100000000 loops, average of 7: 5.48 ns +- 0.354 ns per loop (using standard deviation) | |
954 |
|
959 | |||
955 | In [2]: u = None |
|
960 | In [2]: u = None | |
956 |
|
961 | |||
957 | In [3]: %timeit u is None |
|
962 | In [3]: %timeit u is None | |
958 |
10000000 loops, |
|
963 | 10000000 loops, average of 7: 22.7 ns +- 2.33 ns per loop (using standard deviation) | |
959 |
|
964 | |||
960 | In [4]: %timeit -r 4 u == None |
|
965 | In [4]: %timeit -r 4 u == None | |
961 |
1000000 loops, |
|
966 | 10000000 loops, average of 4: 27.5 ns +- 2.91 ns per loop (using standard deviation) | |
962 |
|
967 | |||
963 | In [5]: import time |
|
968 | In [5]: import time | |
964 |
|
969 | |||
965 | In [6]: %timeit -n1 time.sleep(2) |
|
970 | In [6]: %timeit -n1 time.sleep(2) | |
966 |
1 loop, |
|
971 | 1 loop, average of 7: 2 s +- 4.71 µs per loop (using standard deviation) | 
967 |
|
972 | |||
968 |
|
973 | |||
969 | The times reported by %timeit will be slightly higher than those |
|
974 | The times reported by %timeit will be slightly higher than those | |
@@ -981,7 +986,8 b' python-profiler package from non-free.""")' | |||||
981 |
|
986 | |||
982 | timefunc = timeit.default_timer |
|
987 | timefunc = timeit.default_timer | |
983 | number = int(getattr(opts, "n", 0)) |
|
988 | number = int(getattr(opts, "n", 0)) | |
984 | repeat = int(getattr(opts, "r", timeit.default_repeat)) |
|
989 | default_repeat = 7 if timeit.default_repeat < 7 else timeit.default_repeat | |
|
990 | repeat = int(getattr(opts, "r", default_repeat)) | |||
985 | precision = int(getattr(opts, "p", 3)) |
|
991 | precision = int(getattr(opts, "p", 3)) | |
986 | quiet = 'q' in opts |
|
992 | quiet = 'q' in opts | |
987 | return_result = 'o' in opts |
|
993 | return_result = 'o' in opts | |
@@ -1036,22 +1042,26 b' python-profiler package from non-free.""")' | |||||
1036 | # This is used to check if there is a huge difference between the |
|
1042 | # This is used to check if there is a huge difference between the | |
1037 | # best and worst timings. |
|
1043 | # best and worst timings. | |
1038 | # Issue: https://github.com/ipython/ipython/issues/6471 |
|
1044 | # Issue: https://github.com/ipython/ipython/issues/6471 | |
1039 | worst_tuning = 0 |
|
|||
1040 | if number == 0: |
|
1045 | if number == 0: | |
1041 | # determine number so that 0.2 <= total time < 2.0 |
|
1046 | # determine number so that 0.2 <= total time < 2.0 | |
1042 | number = 1 |
|
1047 | for index in range(0, 10): | |
1043 | for _ in range(1, 10): |
|
1048 | number = 10 ** index | |
1044 | time_number = timer.timeit(number) |
|
1049 | time_number = timer.timeit(number) | |
1045 | worst_tuning = max(worst_tuning, time_number / number) |
|
|||
1046 | if time_number >= 0.2: |
|
1050 | if time_number >= 0.2: | |
1047 | break |
|
1051 | break | |
1048 | number *= 10 |
|
1052 | ||
1049 | all_runs = timer.repeat(repeat, number) |
|
1053 | all_runs = timer.repeat(repeat, number) | |
1050 | best = min(all_runs) / number |
|
1054 | timings = [ dt / number for dt in all_runs] | |
|
1055 | ||||
|
1056 | def _avg(numbers): | |||
|
1057 | return math.fsum(numbers) / len(numbers) | |||
|
1058 | ||||
|
1059 | def _stdev(numbers): | |||
|
1060 | mean = _avg(numbers) | |||
|
1061 | return (math.fsum([(x - mean) ** 2 for x in numbers]) / len(numbers)) ** 0.5 | |||
1051 |
|
1062 | |||
1052 | worst = max(all_runs) / number |
|
1063 | average = _avg(timings) | |
1053 | if worst_tuning: |
|
1064 | stdev = _stdev(timings) | |
1054 | worst = max(worst, worst_tuning) |
|
|||
1055 |
|
1065 | |||
1056 | if not quiet : |
|
1066 | if not quiet : | |
1057 | # Check best timing is greater than zero to avoid a |
|
1067 | # Check best timing is greater than zero to avoid a | |
@@ -1059,20 +1069,20 b' python-profiler package from non-free.""")' | |||||
1059 | # In cases where the slowest timing is less than a microsecond 
|
1069 | # In cases where the slowest timing is less than a microsecond | 
1060 | # we assume that it does not really matter if the fastest |
|
1070 | # we assume that it does not really matter if the fastest | |
1061 | # timing is 4 times faster than the slowest timing or not. |
|
1071 | # timing is 4 times faster than the slowest timing or not. | |
1062 | if worst > 4 * best and best > 0 and worst > 1e-6: |
|
|||
1063 | print("The slowest run took %0.2f times longer than the " |
|
|||
1064 | "fastest. This could mean that an intermediate result " |
|
|||
1065 | "is being cached." % (worst / best)) |
|
|||
1066 | if number == 1: # No s at "loops" if only one loop |
|
1072 | if number == 1: # No s at "loops" if only one loop | |
1067 |
print(u"% |
|
1073 | print(u"%s loop, average of %d: %s +- %s per loop (using standard deviation)" | |
1068 | _format_time(best, precision))) |
|
1074 | % (number, repeat, | |
|
1075 | _format_time(average, precision), | |||
|
1076 | _format_time(stdev, precision))) | |||
1069 | else: |
|
1077 | else: | |
1070 |
print(u"% |
|
1078 | print(u"%s loops, average of %d: %s +- %s per loop (using standard deviation)" | |
1071 | _format_time(best, precision))) |
|
1079 | % (number, repeat, | |
|
1080 | _format_time(average, precision), | |||
|
1081 | _format_time(stdev, precision))) | |||
1072 | if tc > tc_min: |
|
1082 | if tc > tc_min: | |
1073 | print("Compiler time: %.2f s" % tc) |
|
1083 | print("Compiler time: %.2f s" % tc) | |
1074 | if return_result: |
|
1084 | if return_result: | |
1075 |
return TimeitResult(number, repeat, |
|
1085 | return TimeitResult(number, repeat, average, stdev, all_runs, tc, precision) | |
1076 |
|
1086 | |||
1077 | @skip_doctest |
|
1087 | @skip_doctest | |
1078 | @needs_local_scope |
|
1088 | @needs_local_scope |
@@ -649,12 +649,12 b' class TestAstTransform(unittest.TestCase):' | |||||
649 | called.add(x) |
|
649 | called.add(x) | |
650 | ip.push({'f':f}) |
|
650 | ip.push({'f':f}) | |
651 |
|
651 | |||
652 |
with tt.AssertPrints(" |
|
652 | with tt.AssertPrints("average of "): | |
653 | ip.run_line_magic("timeit", "-n1 f(1)") |
|
653 | ip.run_line_magic("timeit", "-n1 f(1)") | |
654 | self.assertEqual(called, {-1}) |
|
654 | self.assertEqual(called, {-1}) | |
655 | called.clear() |
|
655 | called.clear() | |
656 |
|
656 | |||
657 |
with tt.AssertPrints(" |
|
657 | with tt.AssertPrints("average of "): | |
658 | ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)") |
|
658 | ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)") | |
659 | self.assertEqual(called, {-2, -3}) |
|
659 | self.assertEqual(called, {-2, -3}) | |
660 |
|
660 | |||
@@ -722,12 +722,12 b' class TestAstTransform2(unittest.TestCase):' | |||||
722 | called.add(x) |
|
722 | called.add(x) | |
723 | ip.push({'f':f}) |
|
723 | ip.push({'f':f}) | |
724 |
|
724 | |||
725 |
with tt.AssertPrints(" |
|
725 | with tt.AssertPrints("average of "): | |
726 | ip.run_line_magic("timeit", "-n1 f(1)") |
|
726 | ip.run_line_magic("timeit", "-n1 f(1)") | |
727 | self.assertEqual(called, {(1,)}) |
|
727 | self.assertEqual(called, {(1,)}) | |
728 | called.clear() |
|
728 | called.clear() | |
729 |
|
729 | |||
730 |
with tt.AssertPrints(" |
|
730 | with tt.AssertPrints("average of "): | |
731 | ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)") |
|
731 | ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)") | |
732 | self.assertEqual(called, {(2,), (3,)}) |
|
732 | self.assertEqual(called, {(2,), (3,)}) | |
733 |
|
733 |
General Comments 0
You need to be logged in to leave comments.
Login now