@@ -89,7 +89,6 @@ class TestUpdatePullRequest(object):
89 | 89 | pr_util.update_source_repository(head='c')
90 | 90 | repo = pull_request.source_repo.scm_instance()
91 | 91 | commits = [x for x in repo.get_commits()]
92 |    | - print commits
93 | 92 |
94 | 93 | added_commit_id = commits[-1].raw_id # c commit
95 | 94 | common_commit_id = commits[1].raw_id # b commit is common ancestor
@@ -13,12 +13,12 @@ log = logging.getLogger(__name__)
13 | 13 |
14 | 14 | def usage():
15 | 15 | """Gives usage information."""
16 |    | - print """Usage: %(prog)s repository-to-migrate
   | 16 | + print("""Usage: %(prog)s repository-to-migrate
17 | 17 |
18 | 18 | Upgrade your repository to the new flat format.
19 | 19 |
20 | 20 | NOTE: You should probably make a backup before running this.
21 |    | - """ % {'prog': sys.argv[0]}
   | 21 | + """ % {'prog': sys.argv[0]})
22 | 22 |
23 | 23 | sys.exit(1)
24 | 24 |
@@ -33,7 +33,7 @@ def upgrade(migrate_engine):
33 | 33 | old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl)
34 | 34 | else:
35 | 35 | # sqlite doesn't support dropping constraints...
36 |    | - print """Please manually drop UniqueConstraint('user_id', 'repository_id')"""
   | 36 | + print("""Please manually drop UniqueConstraint('user_id', 'repository_id')""")
37 | 37 |
38 | 38 | if old_cons:
39 | 39 | try:
@@ -41,7 +41,7 @@ def upgrade(migrate_engine):
41 | 41 | except Exception as e:
42 | 42 | # we don't care if this fails really... better to pass migration than
43 | 43 | # leave this in intermidiate state
44 |    | - print 'Failed to remove Unique for user_id, repository_id reason %s' % e
   | 44 | + print('Failed to remove Unique for user_id, repository_id reason %s' % e)
45 | 45 |
46 | 46 |
47 | 47 | #==========================================================================
@@ -61,7 +61,7 @@ def upgrade(migrate_engine):
61 | 61 | old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name='group_to_perm_group_id_permission_id_key')
62 | 62 | else:
63 | 63 | # sqlite doesn't support dropping constraints...
64 |    | - print """Please manually drop UniqueConstraint('group_id', 'permission_id')"""
   | 64 | + print("""Please manually drop UniqueConstraint('group_id', 'permission_id')""")
65 | 65 |
66 | 66 | if old_cons:
67 | 67 | try:
@@ -69,7 +69,7 @@ def upgrade(migrate_engine):
69 | 69 | except Exception as e:
70 | 70 | # we don't care if this fails really... better to pass migration than
71 | 71 | # leave this in intermidiate state
72 |    | - print 'Failed to remove Unique for user_id, repository_id reason %s' % e
   | 72 | + print('Failed to remove Unique for user_id, repository_id reason %s' % e)
73 | 73 |
74 | 74 | return
75 | 75 |
@@ -80,7 +80,7 @@ def fixups(models, _SESSION):
80 | 80 | new_perm = models.Permission()
81 | 81 | new_perm.permission_name = p[0]
82 | 82 | new_perm.permission_longname = p[0] #translation err with p[1]
83 |    | - print 'Creating new permission %s' % p[0]
   | 83 | + print('Creating new permission %s' % p[0])
84 | 84 | _SESSION().add(new_perm)
85 | 85 |
86 | 86 | _SESSION().commit()
@@ -44,6 +44,6 @@ def fixups(models, _SESSION):
44 | 44 | notify('Upgrading repositories Caches')
45 | 45 | repositories = models.Repository.getAll()
46 | 46 | for repo in repositories:
47 |    | - print repo
   | 47 | + print(repo)
48 | 48 | repo.update_commit_cache()
49 | 49 | _SESSION().commit()
@@ -73,7 +73,7 @@ def fixups(models, _SESSION):
73 | 73 | repo_store_path = get_repos_location(models.RhodeCodeUi)
74 | 74 | _store = os.path.join(repo_store_path, '.cache', 'largefiles')
75 | 75 | notify('Setting largefiles usercache')
76 |    | - print _store
   | 76 | + print(_store)
77 | 77 |
78 | 78 | if not models.RhodeCodeUi.query().filter(
79 | 79 | models.RhodeCodeUi.ui_key == 'usercache').scalar():
@@ -39,7 +39,7 @@ def fixups(models, _SESSION):
39 | 39 | notify('fixing new schema for landing_rev')
40 | 40 |
41 | 41 | for repo in models.Repository.get_all():
42 |    | - print u'repo %s old landing rev is: %s' % (repo, repo.landing_rev)
   | 42 | + print(u'repo %s old landing rev is: %s' % (repo, repo.landing_rev))
43 | 43 | _rev = repo.landing_rev[1]
44 | 44 | _rev_type = 'rev' # default
45 | 45 |
@@ -58,13 +58,13 @@ def fixups(models, _SESSION):
58 | 58 | elif _rev in known_bookmarks:
59 | 59 | _rev_type = 'book'
60 | 60 | except Exception as e:
61 |    | - print e
62 |    | - print 'continue...'
   | 61 | + print(e)
   | 62 | + print('continue...')
63 | 63 | #we don't want any error to break the process
64 | 64 | pass
65 | 65 |
66 | 66 | _new_landing_rev = '%s:%s' % (_rev_type, _rev)
67 |    | - print u'setting to %s' % _new_landing_rev
   | 67 | + print(u'setting to %s' % _new_landing_rev)
68 | 68 | repo.landing_rev = _new_landing_rev
69 | 69 | _SESSION().add(repo)
70 | 70 | _SESSION().commit()
@@ -47,7 +47,7 @@ def fixups(models, _SESSION):
47 | 47 | new_perm = models.Permission()
48 | 48 | new_perm.permission_name = p[0]
49 | 49 | new_perm.permission_longname = p[0] #translation err with p[1]
50 |    | - print 'Creating new permission %s' % p[0]
   | 50 | + print('Creating new permission %s' % p[0])
51 | 51 | _SESSION().add(new_perm)
52 | 52 |
53 | 53 | _SESSION().commit()
@@ -60,6 +60,6 @@ def fixups(models, _SESSION):
60 | 60 | new = models.UserToPerm()
61 | 61 | new.user = user
62 | 62 | new.permission = get_by_key(models.Permission, _def)
63 |    | - print 'Setting default to %s' % _def
   | 63 | + print('Setting default to %s' % _def)
64 | 64 | _SESSION().add(new)
65 | 65 | _SESSION().commit()
@@ -38,7 +38,7 @@ def downgrade(migrate_engine):
38 | 38 | def fixups(models, _SESSION):
39 | 39 | notify('Setting default renderer to rst')
40 | 40 | for cs_comment in models.ChangesetComment.get_all():
41 |    | - print 'comment_id %s renderer rst' % (cs_comment.comment_id)
   | 41 | + print('comment_id %s renderer rst' % (cs_comment.comment_id))
42 | 42 | cs_comment.renderer = 'rst'
43 | 43 | _SESSION().add(cs_comment)
44 | 44 | _SESSION().commit()
@@ -59,7 +59,7 @@ class DaemonLock(object):
59 | 59 | def _on_finalize(lock, debug):
60 | 60 | if lock.held:
61 | 61 | if debug:
62 |    | - print 'leck held finilazing and running lock.release()'
   | 62 | + print('leck held finilazing and running lock.release()')
63 | 63 | lock.release()
64 | 64 |
65 | 65 | def lock(self):
@@ -69,7 +69,7 @@ class DaemonLock(object):
69 | 69 | """
70 | 70 | lockname = '%s' % (os.getpid())
71 | 71 | if self.debug:
72 |    | - print 'running lock'
   | 72 | + print('running lock')
73 | 73 | self.trylock()
74 | 74 | self.makelock(lockname, self.pidfile)
75 | 75 | return True
@@ -77,7 +77,7 @@ class DaemonLock(object):
77 | 77 | def trylock(self):
78 | 78 | running_pid = False
79 | 79 | if self.debug:
80 |    | - print 'checking for already running process'
   | 80 | + print('checking for already running process')
81 | 81 | try:
82 | 82 | with open(self.pidfile, 'r') as f:
83 | 83 | try:
@@ -97,13 +97,13 @@ class DaemonLock(object):
97 | 97 | if exc.errno in (errno.ESRCH, errno.EPERM):
98 | 98 | print
99 | 99 |
100 |     | - print "Removing lock file for the: %s" % running_pid
    | 100 | + print("Removing lock file for the: %s" % running_pid)
101 | 101 | self.release()
102 | 102 | else:
103 | 103 | raise
104 | 104 | else:
105 |     | - print "You already have an instance of the program running"
106 |     | - print "It is running as process %s" % running_pid
    | 105 | + print("You already have an instance of the program running")
    | 106 | + print("It is running as process %s" % running_pid)
107 | 107 | raise LockHeld()
108 | 108 |
109 | 109 | except IOError as e:
@@ -114,21 +114,21 @@ class DaemonLock(object):
114 | 114 | """releases the pid by removing the pidfile
115 | 115 | """
116 | 116 | if self.debug:
117 |     | - print 'trying to release the pidlock'
    | 117 | + print('trying to release the pidlock')
118 | 118 |
119 | 119 | if self.callbackfn:
120 | 120 | #execute callback function on release
121 | 121 | if self.debug:
122 |     | - print 'executing callback function %s' % self.callbackfn
    | 122 | + print('executing callback function %s' % self.callbackfn)
123 | 123 | self.callbackfn()
124 | 124 | try:
125 | 125 | if self.debug:
126 |     | - print 'removing pidfile %s' % self.pidfile
    | 126 | + print('removing pidfile %s' % self.pidfile)
127 | 127 | os.remove(self.pidfile)
128 | 128 | self.held = False
129 | 129 | except OSError as e:
130 | 130 | if self.debug:
131 |     | - print 'removing pidfile failed %s' % e
    | 131 | + print('removing pidfile failed %s' % e)
132 | 132 | pass
133 | 133 |
134 | 134 | def makelock(self, lockname, pidfile):
@@ -139,7 +139,7 @@ class DaemonLock(object):
139 | 139 | :param pidfile: the file to write the pid in
140 | 140 | """
141 | 141 | if self.debug:
142 |     | - print 'creating a file %s and pid: %s' % (pidfile, lockname)
    | 142 | + print('creating a file %s and pid: %s' % (pidfile, lockname))
143 | 143 |
144 | 144 | dir_, file_ = os.path.split(pidfile)
145 | 145 | if not os.path.isdir(dir_):
@@ -40,10 +40,10 @@ def make_web_build_callback(filename):
40 | 40 | stdout = ''.join(stdout)
41 | 41 | stderr = ''.join(stderr)
42 | 42 | if stdout:
43 |    | - print stdout
   | 43 | + print(stdout)
44 | 44 | if stderr:
45 | 45 | print
46 |    | - print stderr
   | 46 | + print(stderr)
47 | 47 |
48 | 48 |
49 | 49 | MAXFD = 1024
@@ -53,7 +53,7 @@ class TestINI(object):
53 | 53 | data. Example usage::
54 | 54 |
55 | 55 | with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
56 |    | - print 'paster server %s' % new_test_ini
   | 56 | + print('paster server %s' % new_test_ini)
57 | 57 | """
58 | 58 |
59 | 59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
@@ -194,7 +194,7 @@ class TestSimpleSvnApp(object):
194 | 194 | ]
195 | 195 | request_mock.assert_called_once_with(
196 | 196 | self.environment['REQUEST_METHOD'], expected_url,
197 |     | - data=self.data, headers=expected_request_headers)
    | 197 | + data=self.data, headers=expected_request_headers, stream=False)
198 | 198 | response_mock.iter_content.assert_called_once_with(chunk_size=1024)
199 | 199 | args, _ = start_response.call_args
200 | 200 | assert args[0] == '200 OK'
@@ -33,7 +33,7 @@ class TestTokenizeString(object):
33 | 33 | import this
34 | 34 |
35 | 35 | var = 6
36 |    | - print "this"
   | 36 | + print("this")
37 | 37 |
38 | 38 | '''
39 | 39 |
@@ -58,10 +58,11 @@ class TestTokenizeString(object):
58 | 58 | ('', u'\n'),
59 | 59 | ('', u' '),
60 | 60 | ('k', u'print'),
61 |    | - ('',
   | 61 | + ('p', u'('),
62 | 62 | ('s2',
63 | 63 | ('s2',
64 | 64 | ('s2',
   | 65 | + ('p', u')'),
65 | 66 | ('', u'\n'),
66 | 67 | ('', u'\n'),
67 | 68 | ('', u' ')
@@ -73,7 +74,7 @@ class TestTokenizeString(object):
73 | 74 |
74 | 75 | assert tokens == [
75 | 76 | ('',
76 |    | - u'\n import this\n\n var = 6\n print "this"\n\n ')
   | 77 | + u'\n import this\n\n var = 6\n print("this")\n\n ')
77 | 78 | ]
78 | 79 |
79 | 80 |
@@ -90,7 +90,7 @@ def execute(*popenargs, **kwargs):
90 | 90 | cmd = kwargs.get("args")
91 | 91 | if cmd is None:
92 | 92 | cmd = popenargs[0]
93 |    | - print cmd, output, error
   | 93 | + print('{} {} {} '.format(cmd, output, error))
94 | 94 | raise subprocess32.CalledProcessError(retcode, cmd, output=output)
95 | 95 | return output
96 | 96 |
@@ -125,14 +125,14 @@ class TestPerformanceBase(object):
125 | 125 | try:
126 | 126 | self.test()
127 | 127 | except Exception as error:
128 |     | - print error
    | 128 | + print(error)
129 | 129 | finally:
130 | 130 | self.cleanup()
131 | 131 |
132 |     | - print
133 |     | - print
134 |     | - print
135 |     | - print
    | 132 | + print('Clone time :{}'.format(self.clone_time))
    | 133 | + print('Push time :{}'.format(mean(self.push_times)))
    | 134 | + print('Pull time :{}'.format(mean(self.pull_times)))
    | 135 | + print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
136 | 136 |
137 | 137 | return {
138 | 138 | 'clone': self.clone_time,
@@ -163,10 +163,10 @@ class TestPerformanceBase(object):
163 | 163 | self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 | 164 | commits = commits[self.skip_commits:self.max_commits]
165 | 165 |
166 |     | - print 'Working with %d commits' % len(commits)
167 |     | - for i in xrange(self.n_commits - 1, len(commits), self.n_commits):
    | 166 | + print('Working with %d commits' % len(commits))
    | 167 | + for i in range(self.n_commits - 1, len(commits), self.n_commits):
168 | 168 | commit = commits[i]
169 |     | - print 'Processing commit %s (%d)' % (commit, i + 1)
    | 169 | + print('Processing commit %s (%d)' % (commit, i + 1))
170 | 170 | self.push_times.append(
171 | 171 | self.push(self.orig_repo, commit, 'upstream'))
172 | 172 | self.check_remote_last_commit_is(commit, upstream_url)
@@ -402,7 +402,7 @@ def main(argv):
402 | 402 | '--api-key', dest='api_key', action='store', required=True,
403 | 403 | help='The api key of RhodeCode')
404 | 404 | options = parser.parse_args(argv[1:])
405 |     | - print options
    | 405 | + print(options)
406 | 406 |
407 | 407 | test_config = {
408 | 408 | 'python': {
@@ -452,9 +452,9 @@ def main(argv):
452 | 452 | test_config[test_name]['limit'],
453 | 453 | test_config[test_name].get('skip', 0),
454 | 454 | api_key)
455 |     | - print '*' * 80
456 |     | - print 'Running performance test: %s with size %d' % (test_name, size)
457 |     | - print '*' * 80
    | 455 | + print('*' * 80)
    | 456 | + print('Running performance test: %s with size %d' % (test_name, size))
    | 457 | + print('*' * 80)
458 | 458 | results[test_name][size] = test.run()
459 | 459 | pprint.pprint(dict(results))
460 | 460 |
@@ -51,7 +51,7 @@ def profile():
51 | 51 | try:
52 | 52 | process = psutil.Process(config.pid)
53 | 53 | except psutil.NoSuchProcess:
54 |    | - print
   | 54 | + print("Process {pid} does not exist!".format(pid=config.pid))
55 | 55 | sys.exit(1)
56 | 56 |
57 | 57 | while True:
@@ -105,7 +105,7 @@ def process_stats(process):
105 | 105 |
106 | 106 | def dump_stats(stats):
107 | 107 | for sample in stats:
108 |     | - print json.dumps(sample)
    | 108 | + print(json.dumps(sample))
109 | 109 |
110 | 110 |
111 | 111 | class AppenlightClient():
@@ -43,7 +43,7 @@ RC_WEBSITE = "http://localhost:5001/"
43 | 43 |
44 | 44 | def get_file(prefix):
45 | 45 | out_file = None
46 |    | - for i in xrange(100):
   | 46 | + for i in range(100):
47 | 47 | file_path = "%s_profile%.3d.csv" % (prefix, i)
48 | 48 | if os.path.exists(file_path):
49 | 49 | continue
@@ -54,15 +54,15 @@ def get_file(prefix):
54 | 54 |
55 | 55 |
56 | 56 | def dump_system():
57 |    | - print "System Overview..."
58 |    | - print
59 |    | - (psutil.cpu_count(), psutil.cpu_count(logical=False))
60 |    | - print "\nDisk:"
61 |    | - print psutil.disk_usage(os.sep)
62 |    | - print "\nMemory:"
63 |    | - print psutil.virtual_memory()
64 |    | - print "\nMemory (swap):"
65 |    | - print psutil.swap_memory()
   | 57 | + print("System Overview...")
   | 58 | + print("\nCPU Count: %d (%d real)" %
   | 59 | + (psutil.cpu_count(), psutil.cpu_count(logical=False)))
   | 60 | + print("\nDisk:")
   | 61 | + print(psutil.disk_usage(os.sep))
   | 62 | + print("\nMemory:")
   | 63 | + print(psutil.virtual_memory())
   | 64 | + print("\nMemory (swap):")
   | 65 | + print(psutil.swap_memory())
66 | 66 |
67 | 67 |
68 | 68 | def count_dulwich_fds(proc):
@@ -97,30 +97,30 @@ def dump_process(pid, out_file):
97 | 97 | # Open output files
98 | 98 | vcs_out = get_file("vcs")
99 | 99 | if vcs_out is None:
100 |     | - print "Unable to enumerate output file for VCS"
    | 100 | + print("Unable to enumerate output file for VCS")
101 | 101 | sys.exit(1)
102 | 102 | rc_out = get_file("rc")
103 | 103 | if rc_out is None:
104 |     | - print "Unable to enumerate output file for RC"
    | 104 | + print("Unable to enumerate output file for RC")
105 | 105 | sys.exit(1)
106 | 106 |
107 | 107 | # Show system information
108 | 108 | dump_system()
109 | 109 |
110 |     | - print "\nStarting VCS..."
    | 110 | + print("\nStarting VCS...")
111 | 111 | vcs = psutil.Popen(["vcsserver"])
112 | 112 | time.sleep(1)
113 | 113 | if not vcs.is_running():
114 |     | - print "VCS - Failed to start"
    | 114 | + print("VCS - Failed to start")
115 | 115 | sys.exit(1)
116 |     | - print "VCS - Ok"
    | 116 | + print("VCS - Ok")
117 | 117 |
118 |     | - print "\nStarting RhodeCode..."
    | 118 | + print("\nStarting RhodeCode...")
119 | 119 | rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
120 | 120 | shell=True, stdin=subprocess32.PIPE)
121 | 121 | time.sleep(1)
122 | 122 | if not rc.is_running():
123 |     | - print "RC - Failed to start"
    | 123 | + print("RC - Failed to start")
124 | 124 | vcs.terminate()
125 | 125 | sys.exit(1)
126 | 126 |
@@ -132,19 +132,19 @@ time.sleep(4)
132 | 132 | try:
133 | 133 | urllib.urlopen(RC_WEBSITE)
134 | 134 | except IOError:
135 |     | - print "RC - Website not started"
    | 135 | + print("RC - Website not started")
136 | 136 | vcs.terminate()
137 | 137 | sys.exit(1)
138 |     | - print "RC - Ok"
    | 138 | + print("RC - Ok")
139 | 139 |
140 |     | - print "\nProfiling...\n%s\n" % ("-"*80)
    | 140 | + print("\nProfiling...\n%s\n" % ("-"*80))
141 | 141 | while True:
142 | 142 | try:
143 | 143 | dump_process(vcs, vcs_out)
144 | 144 | dump_process(rc, rc_out)
145 | 145 | time.sleep(PROFILING_INTERVAL)
146 | 146 | except Exception:
147 |     | - print traceback.format_exc()
    | 147 | + print(traceback.format_exc())
148 | 148 | break
149 | 149 |
150 | 150 | # Finalize the profiling
@@ -56,14 +56,14 @@ svn_pages = [
56 | 56 |
57 | 57 | repeat = 10
58 | 58 |
59 |    | - print "Repeating each URL x%d\n" % repeat
   | 59 | + print("Repeating each URL x%d\n" % repeat)
60 | 60 | for page in pages:
61 | 61 | url = "http://%s/%s" % (server, page)
62 |    | - print url
   | 62 | + print(url)
63 | 63 |
64 | 64 | stmt = "urllib2.urlopen('%s', timeout=120)" % url
65 | 65 | t = timeit.Timer(stmt=stmt, setup="import urllib2")
66 | 66 |
67 | 67 | result = t.repeat(repeat=repeat, number=1)
68 |    | - print
69 |    | - (min(result), max(result), sum(result)/len(result))
   | 68 | + print("\t%.3f (min) - %.3f (max) - %.3f (avg)\n" %
   | 69 | + (min(result), max(result), sum(result)/len(result)))
@@ -56,7 +56,7 @@ if len(sys.argv) == 2:
56 | 56 | if not BASE_URI.endswith('/'):
57 | 57 | BASE_URI += '/'
58 | 58 |
59 |    | - print 'Crawling @ %s' % BASE_URI
   | 59 | + print('Crawling @ %s' % BASE_URI)
60 | 60 | BASE_URI += '%s'
61 | 61 | PROJECT_PATH = jn('/', 'home', 'marcink', 'repos')
62 | 62 | PROJECTS = [
@@ -104,16 +104,16 @@ def test_changelog_walk(proj, pages=100)
104 | 104 | size = len(f.read())
105 | 105 | e = time.time() - s
106 | 106 | total_time += e
107 |     | - print 'visited %s size:%s req:%s ms' % (full_uri, size, e)
    | 107 | + print('visited %s size:%s req:%s ms' % (full_uri, size, e))
108 | 108 |
109 |     | - print
110 |     | - print
    | 109 | + print('total_time {}'.format(total_time))
    | 110 | + print('average on req {}'.format(total_time / float(pages)))
111 | 111 |
112 | 112 |
113 | 113 | def test_commit_walk(proj, limit=None):
114 | 114 | repo, proj = _get_repo(proj)
115 | 115 |
116 |     | - print 'processing', jn(PROJECT_PATH, proj)
    | 116 | + print('processing', jn(PROJECT_PATH, proj))
117 | 117 | total_time = 0
118 | 118 |
119 | 119 | cnt = 0
@@ -124,22 +124,22 @@ def test_commit_walk(proj, limit=None):
124 | 124 | break
125 | 125 |
126 | 126 | full_uri = (BASE_URI % raw_cs)
127 |     | - print '%s visiting %s\%s' % (cnt, full_uri, i)
    | 127 | + print('%s visiting %s\%s' % (cnt, full_uri, i))
128 | 128 | s = time.time()
129 | 129 | f = o.open(full_uri)
130 | 130 | size = len(f.read())
131 | 131 | e = time.time() - s
132 | 132 | total_time += e
133 |     | - print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)
    | 133 | + print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
134 | 134 |
135 |     | - print
136 |     | - print
    | 135 | + print('total_time {}'.format(total_time))
    | 136 | + print('average on req {}'.format(total_time / float(cnt)))
137 | 137 |
138 | 138 |
139 | 139 | def test_files_walk(proj, limit=100):
140 | 140 | repo, proj = _get_repo(proj)
141 | 141 |
142 |     | - print
    | 142 | + print('processing {}'.format(jn(PROJECT_PATH, proj)))
143 | 143 | total_time = 0
144 | 144 |
145 | 145 | paths_ = OrderedSet([''])
@@ -166,22 +166,22 @@ def test_files_walk(proj, limit=100):
166 | 166 |
167 | 167 | file_path = '/'.join((proj, 'files', 'tip', f))
168 | 168 | full_uri = (BASE_URI % file_path)
169 |     | - print '%s visiting %s' % (cnt, full_uri)
    | 169 | + print('%s visiting %s' % (cnt, full_uri))
170 | 170 | s = time.time()
171 | 171 | f = o.open(full_uri)
172 | 172 | size = len(f.read())
173 | 173 | e = time.time() - s
174 | 174 | total_time += e
175 |     | - print '%s visited OK size:%s req:%s ms' % (cnt, size, e)
    | 175 | + print('%s visited OK size:%s req:%s ms' % (cnt, size, e))
176 | 176 |
177 |     | - print
178 |     | - print
    | 177 | + print('total_time {}'.format(total_time))
    | 178 | + print('average on req {}'.format(total_time / float(cnt)))
179 | 179 |
180 | 180 | if __name__ == '__main__':
181 | 181 | for path in PROJECTS:
182 | 182 | repo = vcs.get_repo(jn(PROJECT_PATH, path))
183 | 183 | for i in range(PASES):
184 |     | - print 'PASS %s/%s' % (i, PASES)
    | 184 | + print('PASS %s/%s' % (i, PASES))
185 | 185 | test_changelog_walk(repo, pages=80)
186 | 186 | test_commit_walk(repo, limit=100)
187 | 187 | test_files_walk(repo, limit=100)