@@ -89,7 +89,6 @@ class TestUpdatePullRequest(object):
         pr_util.update_source_repository(head='c')
         repo = pull_request.source_repo.scm_instance()
         commits = [x for x in repo.get_commits()]
-        print commits
 
         added_commit_id = commits[-1].raw_id # c commit
         common_commit_id = commits[1].raw_id # b commit is common ancestor
@@ -138,7 +138,7 @@ class DbManage(object):
         DatabaseNotControlledError
 
         if 'sqlite' in self.dburi:
-            print (
+            print(
                 '********************** WARNING **********************\n'
                 'Make sure your version of sqlite is at least 3.7.X. \n'
                 'Earlier versions are known to fail on some migrations\n'
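Most hunks in this changeset follow the same pattern: Python 2 print statements become calls to the print() function. A minimal sketch of how that call form can be kept working on both interpreters via the __future__ import; the helper name and dburi value below are illustrative, not taken from DbManage:

    # Illustrative sketch only; with the future import the function form used
    # throughout this diff behaves the same under Python 2 and Python 3.
    from __future__ import print_function

    def warn_sqlite_version(dburi):
        # hypothetical helper mirroring the DbManage warning above
        if 'sqlite' in dburi:
            print('********************** WARNING **********************\n'
                  'Make sure your version of sqlite is at least 3.7.X. \n'
                  'Earlier versions are known to fail on some migrations')

    warn_sqlite_version('sqlite:///rhodecode.db')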
@@ -13,12 +13,12 @@ log = logging.getLogger(__name__)
 
 def usage():
     """Gives usage information."""
-    print """Usage: %(prog)s repository-to-migrate
+    print("""Usage: %(prog)s repository-to-migrate
 
 Upgrade your repository to the new flat format.
 
 NOTE: You should probably make a backup before running this.
-""" % {'prog': sys.argv[0]}
+""" % {'prog': sys.argv[0]})
 
     sys.exit(1)
 
@@ -33,7 +33,7 @@ def upgrade(migrate_engine):
         old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl)
     else:
         # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('user_id', 'repository_id')"""
+        print("""Please manually drop UniqueConstraint('user_id', 'repository_id')""")
 
     if old_cons:
         try:
@@ -41,7 +41,7 @@ def upgrade(migrate_engine):
         except Exception as e:
             # we don't care if this fails really... better to pass migration than
             # leave this in intermidiate state
-            print 'Failed to remove Unique for user_id, repository_id reason %s' % e
+            print('Failed to remove Unique for user_id, repository_id reason %s' % e)
 
 
     #==========================================================================
@@ -61,7 +61,7 @@ def upgrade(migrate_engine):
         old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl, name='group_to_perm_group_id_permission_id_key')
     else:
         # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('group_id', 'permission_id')"""
+        print("""Please manually drop UniqueConstraint('group_id', 'permission_id')""")
 
     if old_cons:
         try:
@@ -69,7 +69,7 @@ def upgrade(migrate_engine):
         except Exception as e:
             # we don't care if this fails really... better to pass migration than
             # leave this in intermidiate state
-            print 'Failed to remove Unique for user_id, repository_id reason %s' % e
+            print('Failed to remove Unique for user_id, repository_id reason %s' % e)
 
     return
 
@@ -80,7 +80,7 @@ def fixups(models, _SESSION):
         new_perm = models.Permission()
         new_perm.permission_name = p[0]
         new_perm.permission_longname = p[0] #translation err with p[1]
-        print 'Creating new permission %s' % p[0]
+        print('Creating new permission %s' % p[0])
         _SESSION().add(new_perm)
 
     _SESSION().commit()
@@ -44,6 +44,6 @@ def fixups(models, _SESSION):
     notify('Upgrading repositories Caches')
     repositories = models.Repository.getAll()
     for repo in repositories:
-        print repo
+        print(repo)
         repo.update_commit_cache()
     _SESSION().commit()
@@ -73,7 +73,7 @@ def fixups(models, _SESSION):
     repo_store_path = get_repos_location(models.RhodeCodeUi)
     _store = os.path.join(repo_store_path, '.cache', 'largefiles')
     notify('Setting largefiles usercache')
-    print _store
+    print(_store)
 
     if not models.RhodeCodeUi.query().filter(
             models.RhodeCodeUi.ui_key == 'usercache').scalar():
@@ -39,7 +39,7 @@ def fixups(models, _SESSION):
     notify('fixing new schema for landing_rev')
 
     for repo in models.Repository.get_all():
-        print u'repo %s old landing rev is: %s' % (repo, repo.landing_rev)
+        print(u'repo %s old landing rev is: %s' % (repo, repo.landing_rev))
         _rev = repo.landing_rev[1]
         _rev_type = 'rev' # default
 
@@ -58,13 +58,13 @@ def fixups(models, _SESSION):
             elif _rev in known_bookmarks:
                 _rev_type = 'book'
         except Exception as e:
-            print e
-            print 'continue...'
+            print(e)
+            print('continue...')
             #we don't want any error to break the process
             pass
 
         _new_landing_rev = '%s:%s' % (_rev_type, _rev)
-        print u'setting to %s' % _new_landing_rev
+        print(u'setting to %s' % _new_landing_rev)
         repo.landing_rev = _new_landing_rev
         _SESSION().add(repo)
     _SESSION().commit()
@@ -47,7 +47,7 @@ def fixups(models, _SESSION):
         new_perm = models.Permission()
         new_perm.permission_name = p[0]
         new_perm.permission_longname = p[0] #translation err with p[1]
-        print 'Creating new permission %s' % p[0]
+        print('Creating new permission %s' % p[0])
         _SESSION().add(new_perm)
 
     _SESSION().commit()
@@ -60,6 +60,6 @@ def fixups(models, _SESSION):
         new = models.UserToPerm()
         new.user = user
         new.permission = get_by_key(models.Permission, _def)
-        print 'Setting default to %s' % _def
+        print('Setting default to %s' % _def)
         _SESSION().add(new)
     _SESSION().commit()
@@ -38,7 +38,7 @@ def downgrade(migrate_engine):
 def fixups(models, _SESSION):
     notify('Setting default renderer to rst')
     for cs_comment in models.ChangesetComment.get_all():
-        print 'comment_id %s renderer rst' % (cs_comment.comment_id)
+        print('comment_id %s renderer rst' % (cs_comment.comment_id))
         cs_comment.renderer = 'rst'
         _SESSION().add(cs_comment)
     _SESSION().commit()
@@ -59,7 +59,7 @@ class DaemonLock(object):
     def _on_finalize(lock, debug):
         if lock.held:
             if debug:
-                print 'leck held finilazing and running lock.release()'
+                print('leck held finilazing and running lock.release()')
             lock.release()
 
     def lock(self):
@@ -69,7 +69,7 @@ class DaemonLock(object):
         """
         lockname = '%s' % (os.getpid())
         if self.debug:
-            print 'running lock'
+            print('running lock')
         self.trylock()
         self.makelock(lockname, self.pidfile)
         return True
@@ -77,7 +77,7 @@ class DaemonLock(object):
     def trylock(self):
         running_pid = False
         if self.debug:
-            print 'checking for already running process'
+            print('checking for already running process')
         try:
             with open(self.pidfile, 'r') as f:
                 try:
@@ -86,8 +86,8 @@ class DaemonLock(object):
                     running_pid = -1
 
                 if self.debug:
-                    print 'lock file present running_pid: %s, ' \
-                        'checking for execution' % (running_pid,)
+                    print('lock file present running_pid: %s, '
+                          'checking for execution' % (running_pid,))
                 # Now we check the PID from lock file matches to the current
                 # process PID
                 if running_pid:
@@ -95,15 +95,15 @@ class DaemonLock(object):
                         kill(running_pid, 0)
                     except OSError as exc:
                         if exc.errno in (errno.ESRCH, errno.EPERM):
-                            print "Lock File is there but" \
-                                " the program is not running"
-                            print "Removing lock file for the: %s" % running_pid
+                            print("Lock File is there but"
+                                  " the program is not running")
+                            print("Removing lock file for the: %s" % running_pid)
                             self.release()
                         else:
                             raise
                     else:
-                        print "You already have an instance of the program running"
-                        print "It is running as process %s" % running_pid
+                        print("You already have an instance of the program running")
+                        print("It is running as process %s" % running_pid)
                         raise LockHeld()
 
         except IOError as e:
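trylock() above decides whether an existing pid file is stale by sending signal 0 to the recorded PID and inspecting the resulting errno. A standalone sketch of that probe; the helper name lock_is_stale is hypothetical, not part of DaemonLock:

    import errno
    import os

    def lock_is_stale(pid):
        """Signal 0 delivers nothing; it only checks whether `pid` can be signalled."""
        try:
            os.kill(pid, 0)
        except OSError as exc:
            # DaemonLock treats both ESRCH (no such process) and EPERM
            # (process exists but is not ours) as a lock that may be removed.
            if exc.errno in (errno.ESRCH, errno.EPERM):
                return True
            raise
        return False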
@@ -114,21 +114,21 @@ class DaemonLock(object):
         """releases the pid by removing the pidfile
         """
         if self.debug:
-            print 'trying to release the pidlock'
+            print('trying to release the pidlock')
 
         if self.callbackfn:
-            #execute callback function on release
+            # execute callback function on release
             if self.debug:
-                print 'executing callback function %s' % self.callbackfn
+                print('executing callback function %s' % self.callbackfn)
             self.callbackfn()
         try:
             if self.debug:
-                print 'removing pidfile %s' % self.pidfile
+                print('removing pidfile %s' % self.pidfile)
             os.remove(self.pidfile)
             self.held = False
         except OSError as e:
             if self.debug:
-                print 'removing pidfile failed %s' % e
+                print('removing pidfile failed %s' % e)
             pass
 
     def makelock(self, lockname, pidfile):
@@ -139,7 +139,7 @@ class DaemonLock(object):
        :param pidfile: the file to write the pid in
        """
        if self.debug:
-           print 'creating a file %s and pid: %s' % (pidfile, lockname)
+           print('creating a file %s and pid: %s' % (pidfile, lockname))
 
        dir_, file_ = os.path.split(pidfile)
        if not os.path.isdir(dir_):
@@ -40,10 +40,10 @@ def make_web_build_callback(filename):
     stdout = ''.join(stdout)
     stderr = ''.join(stderr)
     if stdout:
-        print stdout
+        print(stdout)
     if stderr:
-        print '%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20)
-        print stderr
+        print('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20))
+        print(stderr)
 
 
 MAXFD = 1024
@@ -53,7 +53,7 @@ class TestINI(object):
     data. Example usage::
 
         with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
-            print 'paster server %s' % new_test_ini
+            print('paster server %s' % new_test_ini)
     """
 
     def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
@@ -194,7 +194,7 @@ class TestSimpleSvnApp(object):
         ]
         request_mock.assert_called_once_with(
             self.environment['REQUEST_METHOD'], expected_url,
-            data=self.data, headers=expected_request_headers)
+            data=self.data, headers=expected_request_headers, stream=False)
         response_mock.iter_content.assert_called_once_with(chunk_size=1024)
         args, _ = start_response.call_args
         assert args[0] == '200 OK'
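The updated assertion expects the proxied Subversion request to be issued with stream=False and its body read back in 1 KB chunks. A rough sketch of that call shape with the requests library; the function and argument names are placeholders, not application code:

    import requests

    def proxy_svn_request(method, url, data, headers):
        # stream=False downloads the body eagerly; iter_content() still yields
        # it back in fixed-size chunks, which is what the WSGI app forwards.
        response = requests.request(method, url, data=data,
                                    headers=headers, stream=False)
        return response.iter_content(chunk_size=1024)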
@@ -33,7 +33,7 @@ class TestTokenizeString(object):
         import this
 
         var = 6
-        print "this"
+        print("this")
 
     '''
 
@@ -58,10 +58,11 @@ class TestTokenizeString(object):
             ('', u'\n'),
             ('', u' '),
             ('k', u'print'),
-            ('', u' '),
-            ('s2', u'"'),
-            ('s2', u'this'),
-            ('s2', u'"'),
+            ('p', u'('),
+            ('s2', u'"'),
+            ('s2', u'this'),
+            ('s2', u'"'),
+            ('p', u')'),
             ('', u'\n'),
             ('', u'\n'),
             ('', u' ')
@@ -73,7 +74,7 @@ class TestTokenizeString(object):
 
         assert tokens == [
             ('',
-             u'\n import this\n\n var = 6\n print "this"\n\n ')
+             u'\n import this\n\n var = 6\n print("this")\n\n ')
         ]
 
 
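The expected token pairs use Pygments' short CSS-class aliases ('k' keyword, 'p' punctuation, 's2' double-quoted string). A sketch of how such (class, text) pairs can be produced with Pygments directly; this only approximates the tokenize_string helper under test:

    from pygments import lex
    from pygments.lexers import PythonLexer
    from pygments.token import STANDARD_TYPES

    code = u'print("this")'
    pairs = [(STANDARD_TYPES.get(token_type, ''), value)
             for token_type, value in lex(code, PythonLexer())]
    # pairs now holds (short_class, text) tuples in the style asserted above;
    # the exact classes depend on the lexer and Pygments version.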
@@ -90,7 +90,7 @@ def execute(*popenargs, **kwargs):
         cmd = kwargs.get("args")
         if cmd is None:
             cmd = popenargs[0]
-        print cmd, output, error
+        print('{} {} {} '.format(cmd, output, error))
         raise subprocess32.CalledProcessError(retcode, cmd, output=output)
     return output
 
@@ -125,14 +125,14 @@ class TestPerformanceBase(object):
         try:
             self.test()
         except Exception as error:
-            print error
+            print(error)
         finally:
             self.cleanup()
 
-        print 'Clone time :{}'.format(self.clone_time)
-        print 'Push time :{}'.format(mean(self.push_times))
-        print 'Pull time :{}'.format(mean(self.pull_times))
-        print 'Empty pull time:{}'.format(mean(self.empty_pull_times))
+        print('Clone time :{}'.format(self.clone_time))
+        print('Push time :{}'.format(mean(self.push_times)))
+        print('Pull time :{}'.format(mean(self.pull_times)))
+        print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
 
         return {
             'clone': self.clone_time,
@@ -163,10 +163,10 @@ class TestPerformanceBase(object):
             self.orig_repo, commits[self.skip_commits - 1], 'upstream')
         commits = commits[self.skip_commits:self.max_commits]
 
-        print 'Working with %d commits' % len(commits)
-        for i in xrange(self.n_commits - 1, len(commits), self.n_commits):
+        print('Working with %d commits' % len(commits))
+        for i in range(self.n_commits - 1, len(commits), self.n_commits):
             commit = commits[i]
-            print 'Processing commit %s (%d)' % (commit, i + 1)
+            print('Processing commit %s (%d)' % (commit, i + 1))
             self.push_times.append(
                 self.push(self.orig_repo, commit, 'upstream'))
             self.check_remote_last_commit_is(commit, upstream_url)
@@ -402,7 +402,7 @@ def main(argv):
         '--api-key', dest='api_key', action='store', required=True,
         help='The api key of RhodeCode')
     options = parser.parse_args(argv[1:])
-    print options
+    print(options)
 
     test_config = {
         'python': {
@@ -434,8 +434,8 @@ def main(argv):
     if test_names == ['all']:
         test_names = test_config.keys()
     if not set(test_names) <= set(test_config.keys()):
-        print 'Invalid tests: only %s are valid but specified %s' % \
-            (test_config.keys(), test_names)
+        print('Invalid tests: only %s are valid but specified %s' %
+              (test_config.keys(), test_names))
         return 1
 
     sizes = options.sizes.split(',')
@@ -452,9 +452,9 @@ def main(argv):
                 test_config[test_name]['limit'],
                 test_config[test_name].get('skip', 0),
                 api_key)
-            print '*' * 80
-            print 'Running performance test: %s with size %d' % (test_name, size)
-            print '*' * 80
+            print('*' * 80)
+            print('Running performance test: %s with size %d' % (test_name, size))
+            print('*' * 80)
             results[test_name][size] = test.run()
     pprint.pprint(dict(results))
 
@@ -51,7 +51,7 @@ def profile():
     try:
         process = psutil.Process(config.pid)
     except psutil.NoSuchProcess:
-        print "Process {pid} does not exist!".format(pid=config.pid)
+        print("Process {pid} does not exist!".format(pid=config.pid))
         sys.exit(1)
 
     while True:
@@ -105,7 +105,7 @@ def process_stats(process):
 
 def dump_stats(stats):
     for sample in stats:
-        print json.dumps(sample)
+        print(json.dumps(sample))
 
 
 class AppenlightClient():
@@ -43,7 +43,7 @@ RC_WEBSITE = "http://localhost:5001/"
 
 def get_file(prefix):
     out_file = None
-    for i in xrange(100):
+    for i in range(100):
         file_path = "%s_profile%.3d.csv" % (prefix, i)
         if os.path.exists(file_path):
             continue
@@ -54,15 +54,15 @@ def get_file(prefix):
 
 
 def dump_system():
-    print "System Overview..."
-    print "\nCPU Count: %d (%d real)" % \
-        (psutil.cpu_count(), psutil.cpu_count(logical=False))
-    print "\nDisk:"
-    print psutil.disk_usage(os.sep)
-    print "\nMemory:"
-    print psutil.virtual_memory()
-    print "\nMemory (swap):"
-    print psutil.swap_memory()
+    print("System Overview...")
+    print("\nCPU Count: %d (%d real)" %
+          (psutil.cpu_count(), psutil.cpu_count(logical=False)))
+    print("\nDisk:")
+    print(psutil.disk_usage(os.sep))
+    print("\nMemory:")
+    print(psutil.virtual_memory())
+    print("\nMemory (swap):")
+    print(psutil.swap_memory())
 
 
 def count_dulwich_fds(proc):
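Besides the print conversions, this hunk swaps xrange(100) for range(100), since xrange does not exist on Python 3. A small compatibility sketch for code that still has to run on both interpreters; the range_ alias is illustrative:

    try:
        range_ = xrange  # Python 2: lazy by default
    except NameError:
        range_ = range   # Python 3: range() is already lazy

    for i in range_(100):
        pass  # iterate without materialising a full list on Python 2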
|
68 | def count_dulwich_fds(proc): | |
@@ -97,30 +97,30 b' def dump_process(pid, out_file):' | |||||
97 | # Open output files |
|
97 | # Open output files | |
98 | vcs_out = get_file("vcs") |
|
98 | vcs_out = get_file("vcs") | |
99 | if vcs_out is None: |
|
99 | if vcs_out is None: | |
100 |
print |
|
100 | print("Unable to enumerate output file for VCS") | |
101 | sys.exit(1) |
|
101 | sys.exit(1) | |
102 | rc_out = get_file("rc") |
|
102 | rc_out = get_file("rc") | |
103 | if rc_out is None: |
|
103 | if rc_out is None: | |
104 |
print |
|
104 | print("Unable to enumerate output file for RC") | |
105 | sys.exit(1) |
|
105 | sys.exit(1) | |
106 |
|
106 | |||
107 | # Show system information |
|
107 | # Show system information | |
108 | dump_system() |
|
108 | dump_system() | |
109 |
|
109 | |||
110 |
print |
|
110 | print("\nStarting VCS...") | |
111 | vcs = psutil.Popen(["vcsserver"]) |
|
111 | vcs = psutil.Popen(["vcsserver"]) | |
112 | time.sleep(1) |
|
112 | time.sleep(1) | |
113 | if not vcs.is_running(): |
|
113 | if not vcs.is_running(): | |
114 |
print |
|
114 | print("VCS - Failed to start") | |
115 | sys.exit(1) |
|
115 | sys.exit(1) | |
116 |
print |
|
116 | print("VCS - Ok") | |
117 |
|
117 | |||
118 |
print |
|
118 | print("\nStarting RhodeCode...") | |
119 | rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini", |
|
119 | rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini", | |
120 | shell=True, stdin=subprocess32.PIPE) |
|
120 | shell=True, stdin=subprocess32.PIPE) | |
121 | time.sleep(1) |
|
121 | time.sleep(1) | |
122 | if not rc.is_running(): |
|
122 | if not rc.is_running(): | |
123 |
print |
|
123 | print("RC - Failed to start") | |
124 | vcs.terminate() |
|
124 | vcs.terminate() | |
125 | sys.exit(1) |
|
125 | sys.exit(1) | |
126 |
|
126 | |||
@@ -132,19 +132,19 @@ time.sleep(4)
 try:
     urllib.urlopen(RC_WEBSITE)
 except IOError:
-    print "RC - Website not started"
+    print("RC - Website not started")
     vcs.terminate()
     sys.exit(1)
-print "RC - Ok"
+print("RC - Ok")
 
-print "\nProfiling...\n%s\n" % ("-"*80)
+print("\nProfiling...\n%s\n" % ("-"*80))
 while True:
     try:
         dump_process(vcs, vcs_out)
         dump_process(rc, rc_out)
         time.sleep(PROFILING_INTERVAL)
     except Exception:
-        print traceback.format_exc()
+        print(traceback.format_exc())
         break
 
 # Finalize the profiling
@@ -56,14 +56,14 @@ svn_pages = [
 
 repeat = 10
 
-print "Repeating each URL x%d\n" % repeat
+print("Repeating each URL x%d\n" % repeat)
 for page in pages:
     url = "http://%s/%s" % (server, page)
-    print url
+    print(url)
 
     stmt = "urllib2.urlopen('%s', timeout=120)" % url
     t = timeit.Timer(stmt=stmt, setup="import urllib2")
 
     result = t.repeat(repeat=repeat, number=1)
-    print "\t%.3f (min) - %.3f (max) - %.3f (avg)\n" % \
-        (min(result), max(result), sum(result)/len(result))
+    print("\t%.3f (min) - %.3f (max) - %.3f (avg)\n" %
+          (min(result), max(result), sum(result)/len(result)))
@@ -56,7 +56,7 @@ if len(sys.argv) == 2:
 if not BASE_URI.endswith('/'):
     BASE_URI += '/'
 
-print 'Crawling @ %s' % BASE_URI
+print('Crawling @ %s' % BASE_URI)
 BASE_URI += '%s'
 PROJECT_PATH = jn('/', 'home', 'marcink', 'repos')
 PROJECTS = [
@@ -104,16 +104,16 @@ def test_changelog_walk(proj, pages=100)
         size = len(f.read())
         e = time.time() - s
         total_time += e
-        print 'visited %s size:%s req:%s ms' % (full_uri, size, e)
+        print('visited %s size:%s req:%s ms' % (full_uri, size, e))
 
-    print 'total_time {}'.format(total_time)
-    print 'average on req {}'.format(total_time / float(pages))
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(pages)))
 
 
 def test_commit_walk(proj, limit=None):
     repo, proj = _get_repo(proj)
 
-    print 'processing', jn(PROJECT_PATH, proj)
+    print('processing', jn(PROJECT_PATH, proj))
     total_time = 0
 
     cnt = 0
@@ -124,22 +124,22 @@ def test_commit_walk(proj, limit=None):
             break
 
         full_uri = (BASE_URI % raw_cs)
-        print '%s visiting %s\%s' % (cnt, full_uri, i)
+        print('%s visiting %s\%s' % (cnt, full_uri, i))
         s = time.time()
         f = o.open(full_uri)
         size = len(f.read())
         e = time.time() - s
         total_time += e
-        print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)
+        print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
 
-    print 'total_time {}'.format(total_time)
-    print 'average on req {}'.format(total_time / float(cnt))
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(cnt)))
 
 
 def test_files_walk(proj, limit=100):
     repo, proj = _get_repo(proj)
 
-    print 'processing {}'.format(jn(PROJECT_PATH, proj))
+    print('processing {}'.format(jn(PROJECT_PATH, proj)))
     total_time = 0
 
     paths_ = OrderedSet([''])
@@ -166,22 +166,22 @@ def test_files_walk(proj, limit=100):
 
         file_path = '/'.join((proj, 'files', 'tip', f))
         full_uri = (BASE_URI % file_path)
-        print '%s visiting %s' % (cnt, full_uri)
+        print('%s visiting %s' % (cnt, full_uri))
         s = time.time()
         f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
-        print '%s visited OK size:%s req:%s ms' % (cnt, size, e)
+        print('%s visited OK size:%s req:%s ms' % (cnt, size, e))
 
-    print 'total_time {}'.format(total_time)
-    print 'average on req {}'.format(total_time / float(cnt))
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(cnt)))
 
 if __name__ == '__main__':
     for path in PROJECTS:
         repo = vcs.get_repo(jn(PROJECT_PATH, path))
         for i in range(PASES):
-            print 'PASS %s/%s' % (i, PASES)
+            print('PASS %s/%s' % (i, PASES))
             test_changelog_walk(repo, pages=80)
             test_commit_walk(repo, limit=100)
             test_files_walk(repo, limit=100)