# HG changeset patch
# User Marcin Kuzminski
# Date 2018-09-12 07:47:41
# Node ID 26663955cec76687d14aec85d02c9457e572a65e
# Parent 869da457dca6e04b0b415c04bf46e14ca3e97acd
core: use py3 compatible prints

diff --git a/rhodecode/api/tests/test_update_pull_request.py b/rhodecode/api/tests/test_update_pull_request.py
--- a/rhodecode/api/tests/test_update_pull_request.py
+++ b/rhodecode/api/tests/test_update_pull_request.py
@@ -89,7 +89,6 @@ class TestUpdatePullRequest(object):
         pr_util.update_source_repository(head='c')
         repo = pull_request.source_repo.scm_instance()
         commits = [x for x in repo.get_commits()]
-        print commits
         added_commit_id = commits[-1].raw_id  # c commit
         common_commit_id = commits[1].raw_id  # b commit is common ancestor
diff --git a/rhodecode/lib/db_manage.py b/rhodecode/lib/db_manage.py
--- a/rhodecode/lib/db_manage.py
+++ b/rhodecode/lib/db_manage.py
@@ -138,7 +138,7 @@ class DbManage(object):
             DatabaseNotControlledError
         if 'sqlite' in self.dburi:
-            print (
+            print(
                 '********************** WARNING **********************\n'
                 'Make sure your version of sqlite is at least 3.7.X. \n'
                 'Earlier versions are known to fail on some migrations\n'
diff --git a/rhodecode/lib/dbmigrate/migrate/versioning/migrate_repository.py b/rhodecode/lib/dbmigrate/migrate/versioning/migrate_repository.py
--- a/rhodecode/lib/dbmigrate/migrate/versioning/migrate_repository.py
+++ b/rhodecode/lib/dbmigrate/migrate/versioning/migrate_repository.py
@@ -13,12 +13,12 @@ log = logging.getLogger(__name__)
 def usage():
     """Gives usage information."""
-    print """Usage: %(prog)s repository-to-migrate
+    print("""Usage: %(prog)s repository-to-migrate
     Upgrade your repository to the new flat format.
     NOTE: You should probably make a backup before running this.
-    """ % {'prog': sys.argv[0]}
+    """ % {'prog': sys.argv[0]})
     sys.exit(1)
diff --git a/rhodecode/lib/dbmigrate/versions/005_version_1_3_0.py b/rhodecode/lib/dbmigrate/versions/005_version_1_3_0.py
--- a/rhodecode/lib/dbmigrate/versions/005_version_1_3_0.py
+++ b/rhodecode/lib/dbmigrate/versions/005_version_1_3_0.py
@@ -33,7 +33,7 @@ def upgrade(migrate_engine):
         old_cons = UniqueConstraint('user_id', 'repository_id', table=tbl)
     else:
         # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('user_id', 'repository_id')"""
+        print("""Please manually drop UniqueConstraint('user_id', 'repository_id')""")
     if old_cons:
         try:
@@ -41,7 +41,7 @@ def upgrade(migrate_engine):
         except Exception as e:
             # we don't care if this fails really... better to pass migration than
             # leave this in intermidiate state
-            print 'Failed to remove Unique for user_id, repository_id reason %s' % e
+            print('Failed to remove Unique for user_id, repository_id reason %s' % e)
     #==========================================================================
@@ -61,7 +61,7 @@ def upgrade(migrate_engine):
         old_cons = UniqueConstraint('group_id', 'permission_id', table=tbl,
                                     name='group_to_perm_group_id_permission_id_key')
     else:
         # sqlite doesn't support dropping constraints...
-        print """Please manually drop UniqueConstraint('group_id', 'permission_id')"""
+        print("""Please manually drop UniqueConstraint('group_id', 'permission_id')""")
     if old_cons:
         try:
@@ -69,7 +69,7 @@ def upgrade(migrate_engine):
         except Exception as e:
             # we don't care if this fails really... better to pass migration than
             # leave this in intermidiate state
-            print 'Failed to remove Unique for user_id, repository_id reason %s' % e
+            print('Failed to remove Unique for user_id, repository_id reason %s' % e)
     return
diff --git a/rhodecode/lib/dbmigrate/versions/008_version_1_5_0.py b/rhodecode/lib/dbmigrate/versions/008_version_1_5_0.py
--- a/rhodecode/lib/dbmigrate/versions/008_version_1_5_0.py
+++ b/rhodecode/lib/dbmigrate/versions/008_version_1_5_0.py
@@ -80,7 +80,7 @@ def fixups(models, _SESSION):
         new_perm = models.Permission()
         new_perm.permission_name = p[0]
         new_perm.permission_longname = p[0]  #translation err with p[1]
-        print 'Creating new permission %s' % p[0]
+        print('Creating new permission %s' % p[0])
         _SESSION().add(new_perm)
         _SESSION().commit()
diff --git a/rhodecode/lib/dbmigrate/versions/011_version_1_6_0.py b/rhodecode/lib/dbmigrate/versions/011_version_1_6_0.py
--- a/rhodecode/lib/dbmigrate/versions/011_version_1_6_0.py
+++ b/rhodecode/lib/dbmigrate/versions/011_version_1_6_0.py
@@ -44,6 +44,6 @@ def fixups(models, _SESSION):
     notify('Upgrading repositories Caches')
     repositories = models.Repository.getAll()
     for repo in repositories:
-        print repo
+        print(repo)
         repo.update_commit_cache()
         _SESSION().commit()
diff --git a/rhodecode/lib/dbmigrate/versions/021_version_2_0_2.py b/rhodecode/lib/dbmigrate/versions/021_version_2_0_2.py
--- a/rhodecode/lib/dbmigrate/versions/021_version_2_0_2.py
+++ b/rhodecode/lib/dbmigrate/versions/021_version_2_0_2.py
@@ -73,7 +73,7 @@ def fixups(models, _SESSION):
     repo_store_path = get_repos_location(models.RhodeCodeUi)
     _store = os.path.join(repo_store_path, '.cache', 'largefiles')
     notify('Setting largefiles usercache')
-    print _store
+    print(_store)
     if not models.RhodeCodeUi.query().filter(
             models.RhodeCodeUi.ui_key == 'usercache').scalar():
diff --git a/rhodecode/lib/dbmigrate/versions/022_version_2_0_2.py b/rhodecode/lib/dbmigrate/versions/022_version_2_0_2.py
--- a/rhodecode/lib/dbmigrate/versions/022_version_2_0_2.py
+++ b/rhodecode/lib/dbmigrate/versions/022_version_2_0_2.py
@@ -39,7 +39,7 @@ def fixups(models, _SESSION):
     notify('fixing new schema for landing_rev')
     for repo in models.Repository.get_all():
-        print u'repo %s old landing rev is: %s' % (repo, repo.landing_rev)
+        print(u'repo %s old landing rev is: %s' % (repo, repo.landing_rev))
         _rev = repo.landing_rev[1]
         _rev_type = 'rev'  # default
@@ -58,13 +58,13 @@ def fixups(models, _SESSION):
             elif _rev in known_bookmarks:
                 _rev_type = 'book'
         except Exception as e:
-            print e
-            print 'continue...'
+            print(e)
+            print('continue...')
             #we don't want any error to break the process
             pass
         _new_landing_rev = '%s:%s' % (_rev_type, _rev)
-        print u'setting to %s' % _new_landing_rev
+        print(u'setting to %s' % _new_landing_rev)
         repo.landing_rev = _new_landing_rev
         _SESSION().add(repo)
         _SESSION().commit()
diff --git a/rhodecode/lib/dbmigrate/versions/027_version_2_2_0.py b/rhodecode/lib/dbmigrate/versions/027_version_2_2_0.py
--- a/rhodecode/lib/dbmigrate/versions/027_version_2_2_0.py
+++ b/rhodecode/lib/dbmigrate/versions/027_version_2_2_0.py
@@ -47,7 +47,7 @@ def fixups(models, _SESSION):
         new_perm = models.Permission()
         new_perm.permission_name = p[0]
         new_perm.permission_longname = p[0]  #translation err with p[1]
-        print 'Creating new permission %s' % p[0]
+        print('Creating new permission %s' % p[0])
         _SESSION().add(new_perm)
         _SESSION().commit()
@@ -60,6 +60,6 @@ def fixups(models, _SESSION):
         new = models.UserToPerm()
         new.user = user
         new.permission = get_by_key(models.Permission, _def)
-        print 'Setting default to %s' % _def
+        print('Setting default to %s' % _def)
         _SESSION().add(new)
         _SESSION().commit()
diff --git a/rhodecode/lib/dbmigrate/versions/036_version_2_3_0.py b/rhodecode/lib/dbmigrate/versions/036_version_2_3_0.py
--- a/rhodecode/lib/dbmigrate/versions/036_version_2_3_0.py
+++ b/rhodecode/lib/dbmigrate/versions/036_version_2_3_0.py
@@ -38,7 +38,7 @@ def downgrade(migrate_engine):
 def fixups(models, _SESSION):
     notify('Setting default renderer to rst')
     for cs_comment in models.ChangesetComment.get_all():
-        print 'comment_id %s renderer rst' % (cs_comment.comment_id)
+        print('comment_id %s renderer rst' % (cs_comment.comment_id))
         cs_comment.renderer = 'rst'
         _SESSION().add(cs_comment)
         _SESSION().commit()
diff --git a/rhodecode/lib/pidlock.py b/rhodecode/lib/pidlock.py
--- a/rhodecode/lib/pidlock.py
+++ b/rhodecode/lib/pidlock.py
@@ -59,7 +59,7 @@ class DaemonLock(object):
     def _on_finalize(lock, debug):
         if lock.held:
             if debug:
-                print 'leck held finilazing and running lock.release()'
+                print('leck held finilazing and running lock.release()')
             lock.release()
     def lock(self):
@@ -69,7 +69,7 @@ class DaemonLock(object):
         """
         lockname = '%s' % (os.getpid())
         if self.debug:
-            print 'running lock'
+            print('running lock')
         self.trylock()
         self.makelock(lockname, self.pidfile)
         return True
@@ -77,7 +77,7 @@ class DaemonLock(object):
     def trylock(self):
         running_pid = False
         if self.debug:
-            print 'checking for already running process'
+            print('checking for already running process')
         try:
             with open(self.pidfile, 'r') as f:
                 try:
@@ -86,8 +86,8 @@ class DaemonLock(object):
                     running_pid = -1
                 if self.debug:
-                    print ('lock file present running_pid: %s, '
-                           'checking for execution' % (running_pid,))
+                    print('lock file present running_pid: %s, '
+                          'checking for execution' % (running_pid,))
                 # Now we check the PID from lock file matches to the current
                 # process PID
                 if running_pid:
@@ -95,15 +95,15 @@ class DaemonLock(object):
                         kill(running_pid, 0)
                     except OSError as exc:
                         if exc.errno in (errno.ESRCH, errno.EPERM):
-                            print ("Lock File is there but"
-                                   " the program is not running")
-                            print "Removing lock file for the: %s" % running_pid
+                            print("Lock File is there but"
+                                  " the program is not running")
+                            print("Removing lock file for the: %s" % running_pid)
                             self.release()
                         else:
                             raise
                     else:
-                        print "You already have an instance of the program running"
-                        print "It is running as process %s" % running_pid
+                        print("You already have an instance of the program running")
+                        print("It is running as process %s" % running_pid)
                         raise LockHeld()
         except IOError as e:
@@ -114,21 +114,21 @@ class DaemonLock(object):
        """releases the pid by removing the pidfile
        """
        if self.debug:
-            print 'trying to release the pidlock'
+            print('trying to release the pidlock')
        if self.callbackfn:
-            #execute callback function on release
+            # execute callback function on release
            if self.debug:
-                print 'executing callback function %s' % self.callbackfn
+                print('executing callback function %s' % self.callbackfn)
            self.callbackfn()
        try:
            if self.debug:
-                print 'removing pidfile %s' % self.pidfile
+                print('removing pidfile %s' % self.pidfile)
            os.remove(self.pidfile)
            self.held = False
        except OSError as e:
            if self.debug:
-                print 'removing pidfile failed %s' % e
+                print('removing pidfile failed %s' % e)
            pass
    def makelock(self, lockname, pidfile):
@@ -139,7 +139,7 @@ class DaemonLock(object):
        :param pidfile: the file to write the pid in
        """
        if self.debug:
-            print 'creating a file %s and pid: %s' % (pidfile, lockname)
+            print('creating a file %s and pid: %s' % (pidfile, lockname))
        dir_, file_ = os.path.split(pidfile)
        if not os.path.isdir(dir_):
diff --git a/rhodecode/rcserver.py b/rhodecode/rcserver.py
--- a/rhodecode/rcserver.py
+++ b/rhodecode/rcserver.py
@@ -40,10 +40,10 @@ def make_web_build_callback(filename):
     stdout = ''.join(stdout)
     stderr = ''.join(stderr)
     if stdout:
-        print stdout
+        print(stdout)
     if stderr:
-        print ('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20))
-        print stderr
+        print('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20))
+        print(stderr)
 MAXFD = 1024
diff --git a/rhodecode/tests/fixture.py b/rhodecode/tests/fixture.py
--- a/rhodecode/tests/fixture.py
+++ b/rhodecode/tests/fixture.py
@@ -53,7 +53,7 @@ class TestINI(object):
    data. Example usage::
        with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
-            print 'paster server %s' % new_test_ini
+            print('paster server %s' % new_test_ini)
    """
    def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
diff --git a/rhodecode/tests/lib/middleware/test_simplesvn.py b/rhodecode/tests/lib/middleware/test_simplesvn.py
--- a/rhodecode/tests/lib/middleware/test_simplesvn.py
+++ b/rhodecode/tests/lib/middleware/test_simplesvn.py
@@ -194,7 +194,7 @@ class TestSimpleSvnApp(object):
        ]
        request_mock.assert_called_once_with(
            self.environment['REQUEST_METHOD'], expected_url,
-            data=self.data, headers=expected_request_headers)
+            data=self.data, headers=expected_request_headers, stream=False)
        response_mock.iter_content.assert_called_once_with(chunk_size=1024)
        args, _ = start_response.call_args
        assert args[0] == '200 OK'
diff --git a/rhodecode/tests/lib/test_codeblocks.py b/rhodecode/tests/lib/test_codeblocks.py
--- a/rhodecode/tests/lib/test_codeblocks.py
+++ b/rhodecode/tests/lib/test_codeblocks.py
@@ -33,7 +33,7 @@ class TestTokenizeString(object):
        import this
        var = 6
-        print "this"
+        print("this")
        '''
@@ -58,10 +58,11 @@ class TestTokenizeString(object):
            ('', u'\n'),
            ('', u' '),
            ('k', u'print'),
-            ('', u' '),
-            ('s2', u'"'),
-            ('s2', u'this'),
-            ('s2', u'"'),
+            ('p', u'('),
+            ('s2', u'"'),
+            ('s2', u'this'),
+            ('s2', u'"'),
+            ('p', u')'),
            ('', u'\n'),
            ('', u'\n'),
            ('', u' ')
@@ -73,7 +74,7 @@ class TestTokenizeString(object):
        assert tokens == [
            ('',
-             u'\n import this\n\n var = 6\n print "this"\n\n ')
+             u'\n import this\n\n var = 6\n print("this")\n\n ')
        ]
diff --git a/rhodecode/tests/load/http_performance.py b/rhodecode/tests/load/http_performance.py
--- a/rhodecode/tests/load/http_performance.py
+++ b/rhodecode/tests/load/http_performance.py
@@ -90,7 +90,7 @@ def execute(*popenargs, **kwargs):
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
-        print cmd, output, error
+        print('{} {} {} '.format(cmd, output, error))
        raise subprocess32.CalledProcessError(retcode, cmd, output=output)
    return output
@@ -125,14 +125,14 @@ class TestPerformanceBase(object):
        try:
            self.test()
        except Exception as error:
-            print error
+            print(error)
        finally:
            self.cleanup()
-            print 'Clone time :', self.clone_time
-            print 'Push time :', mean(self.push_times)
-            print 'Pull time :', mean(self.pull_times)
-            print 'Empty pull time:', mean(self.empty_pull_times)
+            print('Clone time :{}'.format(self.clone_time))
+            print('Push time :{}'.format(mean(self.push_times)))
+            print('Pull time :{}'.format(mean(self.pull_times)))
+            print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
        return {
            'clone': self.clone_time,
@@ -163,10 +163,10 @@ class TestPerformanceBase(object):
            self.orig_repo, commits[self.skip_commits - 1], 'upstream')
        commits = commits[self.skip_commits:self.max_commits]
-        print 'Working with %d commits' % len(commits)
-        for i in xrange(self.n_commits - 1, len(commits), self.n_commits):
+        print('Working with %d commits' % len(commits))
+        for i in range(self.n_commits - 1, len(commits), self.n_commits):
            commit = commits[i]
-            print 'Processing commit %s (%d)' % (commit, i + 1)
+            print('Processing commit %s (%d)' % (commit, i + 1))
            self.push_times.append(
                self.push(self.orig_repo, commit, 'upstream'))
            self.check_remote_last_commit_is(commit, upstream_url)
@@ -402,7 +402,7 @@ def main(argv):
        '--api-key', dest='api_key', action='store', required=True,
        help='The api key of RhodeCode')
    options = parser.parse_args(argv[1:])
-    print options
+    print(options)
    test_config = {
        'python': {
@@ -434,8 +434,8 @@ def main(argv):
    if test_names == ['all']:
        test_names = test_config.keys()
    if not set(test_names) <= set(test_config.keys()):
-        print ('Invalid tests: only %s are valid but specified %s' %
-               (test_config.keys(), test_names))
+        print('Invalid tests: only %s are valid but specified %s' %
+              (test_config.keys(), test_names))
        return 1
    sizes = options.sizes.split(',')
@@ -452,9 +452,9 @@ def main(argv):
                test_config[test_name]['limit'],
                test_config[test_name].get('skip', 0),
                api_key)
-            print '*' * 80
-            print 'Running performance test: %s with size %d' % (test_name, size)
-            print '*' * 80
+            print('*' * 80)
+            print('Running performance test: %s with size %d' % (test_name, size))
+            print('*' * 80)
            results[test_name][size] = test.run()
    pprint.pprint(dict(results))
diff --git a/rhodecode/tests/load/profile-mem.py b/rhodecode/tests/load/profile-mem.py
--- a/rhodecode/tests/load/profile-mem.py
+++ b/rhodecode/tests/load/profile-mem.py
@@ -51,7 +51,7 @@ def profile():
        try:
            process = psutil.Process(config.pid)
        except psutil.NoSuchProcess:
-            print "Process {pid} does not exist!".format(pid=config.pid)
+            print("Process {pid} does not exist!".format(pid=config.pid))
            sys.exit(1)
        while True:
@@ -105,7 +105,7 @@ def process_stats(process):
 def dump_stats(stats):
    for sample in stats:
-        print json.dumps(sample)
+        print(json.dumps(sample))
 class AppenlightClient():
diff --git a/rhodecode/tests/load/profile.py b/rhodecode/tests/load/profile.py
--- a/rhodecode/tests/load/profile.py
+++ b/rhodecode/tests/load/profile.py
@@ -43,7 +43,7 @@ RC_WEBSITE = "http://localhost:5001/"
 def get_file(prefix):
    out_file = None
-    for i in xrange(100):
+    for i in range(100):
        file_path = "%s_profile%.3d.csv" % (prefix, i)
        if os.path.exists(file_path):
            continue
@@ -54,15 +54,15 @@ def get_file(prefix):
 def dump_system():
-    print "System Overview..."
-    print "\nCPU Count: %d (%d real)" % \
-        (psutil.cpu_count(), psutil.cpu_count(logical=False))
-    print "\nDisk:"
-    print psutil.disk_usage(os.sep)
-    print "\nMemory:"
-    print psutil.virtual_memory()
-    print "\nMemory (swap):"
-    print psutil.swap_memory()
+    print("System Overview...")
+    print("\nCPU Count: %d (%d real)" %
+          (psutil.cpu_count(), psutil.cpu_count(logical=False)))
+    print("\nDisk:")
+    print(psutil.disk_usage(os.sep))
+    print("\nMemory:")
+    print(psutil.virtual_memory())
+    print("\nMemory (swap):")
+    print(psutil.swap_memory())
 def count_dulwich_fds(proc):
@@ -97,30 +97,30 @@ def dump_process(pid, out_file):
 # Open output files
 vcs_out = get_file("vcs")
 if vcs_out is None:
-    print "Unable to enumerate output file for VCS"
+    print("Unable to enumerate output file for VCS")
    sys.exit(1)
 rc_out = get_file("rc")
 if rc_out is None:
-    print "Unable to enumerate output file for RC"
+    print("Unable to enumerate output file for RC")
    sys.exit(1)
 # Show system information
 dump_system()
-print "\nStarting VCS..."
+print("\nStarting VCS...")
 vcs = psutil.Popen(["vcsserver"])
 time.sleep(1)
 if not vcs.is_running():
-    print "VCS - Failed to start"
+    print("VCS - Failed to start")
    sys.exit(1)
-print "VCS - Ok"
+print("VCS - Ok")
-print "\nStarting RhodeCode..."
+print("\nStarting RhodeCode...")
 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
                   shell=True, stdin=subprocess32.PIPE)
 time.sleep(1)
 if not rc.is_running():
-    print "RC - Failed to start"
+    print("RC - Failed to start")
    vcs.terminate()
    sys.exit(1)
@@ -132,19 +132,19 @@ time.sleep(4)
 try:
    urllib.urlopen(RC_WEBSITE)
 except IOError:
-    print "RC - Website not started"
+    print("RC - Website not started")
    vcs.terminate()
    sys.exit(1)
-print "RC - Ok"
+print("RC - Ok")
-print "\nProfiling...\n%s\n" % ("-"*80)
+print("\nProfiling...\n%s\n" % ("-"*80))
 while True:
    try:
        dump_process(vcs, vcs_out)
        dump_process(rc, rc_out)
        time.sleep(PROFILING_INTERVAL)
    except Exception:
-        print traceback.format_exc()
+        print(traceback.format_exc())
        break
 # Finalize the profiling
diff --git a/rhodecode/tests/load/time_urls.py b/rhodecode/tests/load/time_urls.py
--- a/rhodecode/tests/load/time_urls.py
+++ b/rhodecode/tests/load/time_urls.py
@@ -56,14 +56,14 @@ svn_pages = [
 repeat = 10
-print "Repeating each URL x%d\n" % repeat
+print("Repeating each URL x%d\n" % repeat)
 for page in pages:
    url = "http://%s/%s" % (server, page)
-    print url
+    print(url)
    stmt = "urllib2.urlopen('%s', timeout=120)" % url
    t = timeit.Timer(stmt=stmt, setup="import urllib2")
    result = t.repeat(repeat=repeat, number=1)
-    print "\t%.3f (min) - %.3f (max) - %.3f (avg)\n" % \
-        (min(result), max(result), sum(result)/len(result))
+    print("\t%.3f (min) - %.3f (max) - %.3f (avg)\n" %
+          (min(result), max(result), sum(result)/len(result)))
diff --git a/rhodecode/tests/scripts/test_crawler.py b/rhodecode/tests/scripts/test_crawler.py
--- a/rhodecode/tests/scripts/test_crawler.py
+++ b/rhodecode/tests/scripts/test_crawler.py
@@ -56,7 +56,7 @@ if len(sys.argv) == 2:
 if not BASE_URI.endswith('/'):
    BASE_URI += '/'
-print 'Crawling @ %s' % BASE_URI
+print('Crawling @ %s' % BASE_URI)
 BASE_URI += '%s'
 PROJECT_PATH = jn('/', 'home', 'marcink', 'repos')
 PROJECTS = [
@@ -104,16 +104,16 @@ def test_changelog_walk(proj, pages=100)
        size = len(f.read())
        e = time.time() - s
        total_time += e
-        print 'visited %s size:%s req:%s ms' % (full_uri, size, e)
+        print('visited %s size:%s req:%s ms' % (full_uri, size, e))
-    print 'total_time', total_time
-    print 'average on req', total_time / float(pages)
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(pages)))
 def test_commit_walk(proj, limit=None):
    repo, proj = _get_repo(proj)
-    print 'processing', jn(PROJECT_PATH, proj)
+    print('processing', jn(PROJECT_PATH, proj))
    total_time = 0
    cnt = 0
@@ -124,22 +124,22 @@ def test_commit_walk(proj, limit=None):
            break
        full_uri = (BASE_URI % raw_cs)
-        print '%s visiting %s\%s' % (cnt, full_uri, i)
+        print('%s visiting %s\%s' % (cnt, full_uri, i))
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
-        print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)
+        print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
-    print 'total_time', total_time
-    print 'average on req', total_time / float(cnt)
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(cnt)))
 def test_files_walk(proj, limit=100):
    repo, proj = _get_repo(proj)
-    print 'processing', jn(PROJECT_PATH, proj)
+    print('processing {}'.format(jn(PROJECT_PATH, proj)))
    total_time = 0
    paths_ = OrderedSet([''])
@@ -166,22 +166,22 @@ def test_files_walk(proj, limit=100):
            file_path = '/'.join((proj, 'files', 'tip', f))
            full_uri = (BASE_URI % file_path)
-            print '%s visiting %s' % (cnt, full_uri)
+            print('%s visiting %s' % (cnt, full_uri))
            s = time.time()
            f = o.open(full_uri)
            size = len(f.read())
            e = time.time() - s
            total_time += e
-            print '%s visited OK size:%s req:%s ms' % (cnt, size, e)
+            print('%s visited OK size:%s req:%s ms' % (cnt, size, e))
-    print 'total_time', total_time
-    print 'average on req', total_time / float(cnt)
+    print('total_time {}'.format(total_time))
+    print('average on req {}'.format(total_time / float(cnt)))
 if __name__ == '__main__':
    for path in PROJECTS:
        repo = vcs.get_repo(jn(PROJECT_PATH, path))
        for i in range(PASES):
-            print 'PASS %s/%s' % (i, PASES)
+            print('PASS %s/%s' % (i, PASES))
            test_changelog_walk(repo, pages=80)
            test_commit_walk(repo, limit=100)
            test_files_walk(repo, limit=100)
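Note on the conversion pattern used throughout the hunks above: every change rewrites a Python 2 print statement as a call to the print() function, which parses under both Python 2 and Python 3. The sketch below is illustrative only, it is not part of the changeset, and the names in it (pid, the message text) are made up; it shows the same pattern plus the __future__ import that is commonly added when a file needs Python 2's multi-argument print output to match Python 3 exactly.

    # Illustrative sketch only -- not part of the patch above.
    from __future__ import print_function  # harmless no-op on Python 3

    pid = 1234  # hypothetical value

    # Python 2-only statement form (the kind of line this patch removes):
    #     print "Removing lock file for the: %s" % pid
    # Function form that works on Python 2 and 3 (the kind this patch adds):
    print("Removing lock file for the: %s" % pid)

    # With multiple arguments, plain Python 2 (without the import above)
    # would print a tuple, so another option is to fold the values into
    # one string first, as several hunks above do with str.format():
    print('pid {}'.format(pid))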