cleanup: pass log strings unformatted - avoid unnecessary % formatting when not logging
Mads Kiilerich
r5375:0210d0b7 default
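The change swaps eager %-formatting for the logging module's deferred formatting: when the format string and its arguments are passed to the logger as separate parameters, the interpolation only happens if the record is actually emitted. A minimal sketch of the difference, using the first hunk's message as an example (search_term is a placeholder value, not taken from the diff):

    import logging

    logging.basicConfig(level=logging.INFO)   # DEBUG records are filtered out
    log = logging.getLogger(__name__)

    search_term = 'example'                   # placeholder for illustration

    # Eager: the % operator builds the message string up front,
    # even though the DEBUG record is discarded at INFO level.
    log.debug('Initial search term: %r' % search_term)

    # Lazy: logging stores the format string and arguments and only
    # interpolates them when a handler actually emits the record.
    log.debug('Initial search term: %r', search_term)

Beyond skipping needless work for suppressed log levels, passing the arguments separately also lets handlers and aggregators see the raw format string and values.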
@@ -54,13 +54,13 b' def _journal_filter(user_log, search_ter'
54 54 :param user_log:
55 55 :param search_term:
56 56 """
57 log.debug('Initial search term: %r' % search_term)
57 log.debug('Initial search term: %r', search_term)
58 58 qry = None
59 59 if search_term:
60 60 qp = QueryParser('repository', schema=JOURNAL_SCHEMA)
61 61 qp.add_plugin(DateParserPlugin())
62 62 qry = qp.parse(unicode(search_term))
63 log.debug('Filtering using parsed query %r' % qry)
63 log.debug('Filtering using parsed query %r', qry)
64 64
65 65 def wildcard_handler(col, wc_term):
66 66 if wc_term.startswith('*') and not wc_term.endswith('*'):
@@ -85,7 +85,7 b' def _journal_filter(user_log, search_ter'
85 85 field = getattr(UserLog, 'username')
86 86 else:
87 87 field = getattr(UserLog, field)
88 log.debug('filter field: %s val=>%s' % (field, val))
88 log.debug('filter field: %s val=>%s', field, val)
89 89
90 90 #sql filtering
91 91 if isinstance(term, query.Wildcard):
@@ -128,7 +128,7 b' class AuthSettingsController(BaseControl'
128 128 if k == 'auth_plugins':
129 129 # we want to store it comma separated inside our settings
130 130 v = ','.join(v)
131 log.debug("%s = %s" % (k, str(v)))
131 log.debug("%s = %s", k, str(v))
132 132 setting = Setting.create_or_update(k, v)
133 133 Session().add(setting)
134 134 Session().commit()
@@ -196,8 +196,8 b' class GistsController(BaseController):'
196 196 #check if this gist is not expired
197 197 if c.gist.gist_expires != -1:
198 198 if time.time() > c.gist.gist_expires:
199 log.error('Gist expired at %s' %
200 (time_to_datetime(c.gist.gist_expires)))
199 log.error('Gist expired at %s',
200 time_to_datetime(c.gist.gist_expires))
201 201 raise HTTPNotFound()
202 202 try:
203 203 c.file_changeset, c.files = GistModel().get_gist_files(gist_id,
@@ -221,8 +221,8 b' class GistsController(BaseController):'
221 221 #check if this gist is not expired
222 222 if c.gist.gist_expires != -1:
223 223 if time.time() > c.gist.gist_expires:
224 log.error('Gist expired at %s' %
225 (time_to_datetime(c.gist.gist_expires)))
224 log.error('Gist expired at %s',
225 time_to_datetime(c.gist.gist_expires))
226 226 raise HTTPNotFound()
227 227 try:
228 228 c.file_changeset, c.files = GistModel().get_gist_files(gist_id)
@@ -285,8 +285,8 b' class GistsController(BaseController):'
285 285
286 286 ##TODO: maybe move this to model ?
287 287 if revision != last_rev.raw_id:
288 log.error('Last revision %s is different than submitted %s'
289 % (revision, last_rev))
288 log.error('Last revision %s is different than submitted %s',
289 revision, last_rev)
290 290 # our gist has newer version than we
291 291 success = False
292 292
@@ -497,7 +497,7 b' class SettingsController(BaseController)'
497 497 try:
498 498 import kallithea
499 499 ver = kallithea.__version__
500 log.debug('Checking for upgrade on `%s` server' % _update_url)
500 log.debug('Checking for upgrade on `%s` server', _update_url)
501 501 opener = urllib2.build_opener()
502 502 opener.addheaders = [('User-agent', 'Kallithea-SCM/%s' % ver)]
503 503 response = opener.open(_update_url)
@@ -117,7 +117,7 b' class JSONRPCController(WSGIController):'
117 117 else:
118 118 length = environ['CONTENT_LENGTH'] or 0
119 119 length = int(environ['CONTENT_LENGTH'])
120 log.debug('Content-Length: %s' % length)
120 log.debug('Content-Length: %s', length)
121 121
122 122 if length == 0:
123 123 log.debug("Content-Length is 0")
@@ -144,8 +144,8 b' class JSONRPCController(WSGIController):'
144 144 self._request_params = {}
145 145
146 146 log.debug(
147 'method: %s, params: %s' % (self._req_method,
148 self._request_params)
147 'method: %s, params: %s', self._req_method,
148 self._request_params
149 149 )
150 150 except KeyError as e:
151 151 return jsonrpc_error(retid=self._req_id,
@@ -163,7 +163,7 b' class JSONRPCController(WSGIController):'
163 163 return jsonrpc_error(retid=self._req_id,
164 164 message='request from IP:%s not allowed' % (ip_addr,))
165 165 else:
166 log.info('Access for IP:%s allowed' % (ip_addr,))
166 log.info('Access for IP:%s allowed', ip_addr)
167 167
168 168 except Exception as e:
169 169 return jsonrpc_error(retid=self._req_id,
@@ -261,8 +261,8 b' class JSONRPCController(WSGIController):'
261 261 except JSONRPCError as e:
262 262 self._error = safe_str(e)
263 263 except Exception as e:
264 log.error('Encountered unhandled exception: %s'
265 % (traceback.format_exc(),))
264 log.error('Encountered unhandled exception: %s',
265 traceback.format_exc(),)
266 266 json_exc = JSONRPCError('Internal server error')
267 267 self._error = safe_str(json_exc)
268 268
@@ -273,7 +273,7 b' class JSONRPCController(WSGIController):'
273 273 try:
274 274 return json.dumps(response)
275 275 except TypeError as e:
276 log.error('API FAILED. Error encoding response: %s' % e)
276 log.error('API FAILED. Error encoding response: %s', e)
277 277 return json.dumps(
278 278 dict(
279 279 id=self._req_id,
@@ -286,7 +286,7 b' class JSONRPCController(WSGIController):'
286 286 """
287 287 Return method named by `self._req_method` in controller if able
288 288 """
289 log.debug('Trying to find JSON-RPC method: %s' % (self._req_method,))
289 log.debug('Trying to find JSON-RPC method: %s', self._req_method)
290 290 if self._req_method.startswith('_'):
291 291 raise AttributeError("Method not allowed")
292 292
@@ -129,7 +129,7 b' class ChangelogController(BaseRepoContro'
129 129 try:
130 130
131 131 if f_path:
132 log.debug('generating changelog for path %s' % f_path)
132 log.debug('generating changelog for path %s', f_path)
133 133 # get the history for the file !
134 134 tip_cs = c.db_repo_scm_instance.get_changeset()
135 135 try:
@@ -246,8 +246,8 b' class CompareController(BaseRepoControll'
246 246 # case we want a simple diff without incoming changesets,
247 247 # previewing what will be merged.
248 248 # Make the diff on the other repo (which is known to have other_rev)
249 log.debug('Using ancestor %s as rev1 instead of %s'
250 % (c.ancestor, c.a_rev))
249 log.debug('Using ancestor %s as rev1 instead of %s',
250 c.ancestor, c.a_rev)
251 251 rev1 = c.ancestor
252 252 org_repo = other_repo
253 253 else: # comparing tips, not necessarily linearly related
@@ -262,8 +262,8 b' class CompareController(BaseRepoControll'
262 262
263 263 diff_limit = self.cut_off_limit if not c.fulldiff else None
264 264
265 log.debug('running diff between %s and %s in %s'
266 % (rev1, c.cs_rev, org_repo.scm_instance.path))
265 log.debug('running diff between %s and %s in %s',
266 rev1, c.cs_rev, org_repo.scm_instance.path)
267 267 txtdiff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
268 268 ignore_whitespace=ignore_whitespace,
269 269 context=line_context)
@@ -57,7 +57,7 b' class ErrorController(BaseController):'
57 57 resp = request.environ.get('pylons.original_response')
58 58 c.site_name = config.get('title')
59 59
60 log.debug('### %s ###' % (resp and resp.status or 'no response'))
60 log.debug('### %s ###', resp and resp.status or 'no response')
61 61
62 62 e = request.environ
63 63 c.serv_p = r'%(protocol)s://%(host)s/' % {
@@ -554,21 +554,21 b' class FilesController(BaseRepoController'
554 554 os.makedirs(archive_cache_dir)
555 555 cached_archive_path = os.path.join(archive_cache_dir, archive_name)
556 556 if os.path.isfile(cached_archive_path):
557 log.debug('Found cached archive in %s' % cached_archive_path)
557 log.debug('Found cached archive in %s', cached_archive_path)
558 558 archive_path = cached_archive_path
559 559 else:
560 log.debug('Archive %s is not yet cached' % (archive_name))
560 log.debug('Archive %s is not yet cached', archive_name)
561 561
562 562 if archive_path is None:
563 563 # generate new archive
564 564 fd, archive_path = tempfile.mkstemp()
565 log.debug('Creating new temp archive in %s' % archive_path)
565 log.debug('Creating new temp archive in %s', archive_path)
566 566 with os.fdopen(fd, 'wb') as stream:
567 567 cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
568 568 # stream (and thus fd) has been closed by cs.fill_archive
569 569 if cached_archive_path is not None:
570 570 # we generated the archive - move it to cache
571 log.debug('Storing new archive in %s' % cached_archive_path)
571 log.debug('Storing new archive in %s', cached_archive_path)
572 572 shutil.move(archive_path, cached_archive_path)
573 573 archive_path = cached_archive_path
574 574
@@ -581,7 +581,7 b' class FilesController(BaseRepoController'
581 581 yield data
582 582 stream.close()
583 583 if archive_path != cached_archive_path:
584 log.debug('Destroying temp archive %s' % archive_path)
584 log.debug('Destroying temp archive %s', archive_path)
585 585 os.remove(archive_path)
586 586
587 587 action_logger(user=c.authuser,
@@ -64,8 +64,8 b' class LoginController(BaseController):'
64 64 server_parsed = urlparse.urlparse(url.current())
65 65 allowed_schemes = ['http', 'https']
66 66 if parsed.scheme and parsed.scheme not in allowed_schemes:
67 log.error('Suspicious URL scheme detected %s for url %s' %
68 (parsed.scheme, parsed))
67 log.error('Suspicious URL scheme detected %s for url %s',
68 parsed.scheme, parsed)
69 69 return False
70 70 if server_parsed.netloc != parsed.netloc:
71 71 log.error('Suspicious NETLOC detected %s for url %s server url '
@@ -641,8 +641,8 b' class PullrequestsController(BaseRepoCon'
641 641 diff_limit = self.cut_off_limit if not c.fulldiff else None
642 642
643 643 # we swap org/other ref since we run a simple diff on one repo
644 log.debug('running diff between %s and %s in %s'
645 % (c.a_rev, c.cs_rev, org_scm_instance.path))
644 log.debug('running diff between %s and %s in %s',
645 c.a_rev, c.cs_rev, org_scm_instance.path)
646 646 txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
647 647 ignore_whitespace=ignore_whitespace,
648 648 context=line_context)
@@ -77,8 +77,8 b' class SearchController(BaseRepoControlle'
77 77 'path': SCHEMA
78 78 }.get(c.cur_type, SCHEMA)
79 79
80 log.debug('IDX: %s' % index_name)
81 log.debug('SCHEMA: %s' % schema_defn)
80 log.debug('IDX: %s', index_name)
81 log.debug('SCHEMA: %s', schema_defn)
82 82
83 83 if c.cur_query:
84 84 cur_query = c.cur_query.lower()
@@ -109,8 +109,8 b' class SearchController(BaseRepoControlle'
109 109
110 110 matcher = query.matcher(searcher)
111 111
112 log.debug('query: %s' % query)
113 log.debug('hl terms: %s' % highlight_items)
112 log.debug('query: %s', query)
113 log.debug('hl terms: %s', highlight_items)
114 114 results = searcher.search(query)
115 115 res_ln = len(results)
116 116 c.runtime = '%s results (%.3f seconds)' % (
@@ -87,7 +87,7 b' class SummaryController(BaseRepoControll'
87 87 if not isinstance(readme, FileNode):
88 88 continue
89 89 readme_file = f
90 log.debug('Found README file `%s` rendering...' %
90 log.debug('Found README file `%s` rendering...',
91 91 readme_file)
92 92 readme_data = renderer.render(readme.content,
93 93 filename=f)
@@ -439,10 +439,10 b' def allowed_api_access(controller_name, '
439 439 from kallithea import CONFIG
440 440 whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
441 441 sep=',')
442 log.debug('whitelist of API access is: %s' % (whitelist))
442 log.debug('whitelist of API access is: %s', whitelist)
443 443 api_access_valid = controller_name in whitelist
444 444 if api_access_valid:
445 log.debug('controller:%s is in API whitelist' % (controller_name))
445 log.debug('controller:%s is in API whitelist', controller_name)
446 446 else:
447 447 msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
448 448 if api_key:
@@ -497,7 +497,7 b' class AuthUser(object):'
497 497
498 498 # Look up database user, if necessary.
499 499 if user_id is not None:
500 log.debug('Auth User lookup by USER ID %s' % user_id)
500 log.debug('Auth User lookup by USER ID %s', user_id)
501 501 dbuser = user_model.get(user_id)
502 502 else:
503 503 # Note: dbuser is allowed to be None.
@@ -516,7 +516,7 b' class AuthUser(object):'
516 516 if not self.username:
517 517 self.username = 'None'
518 518
519 log.debug('Auth User is now %s' % self)
519 log.debug('Auth User is now %s', self)
520 520
521 521 def _fill_data(self, dbuser):
522 522 """
@@ -615,7 +615,7 b' class AuthUser(object):'
615 615 allowed_ips = AuthUser.get_allowed_ips(user.user_id, cache=True,
616 616 inherit_from_default=user.inherit_default_permissions)
617 617 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
618 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
618 log.debug('IP:%s is in range of %s', ip_addr, allowed_ips)
619 619 return True
620 620 else:
621 621 log.info('Access for IP:%s forbidden, '
@@ -716,7 +716,7 b' def redirect_to_login(message=None):'
716 716 p = url.current()
717 717 if message:
718 718 h.flash(h.literal(message), category='warning')
719 log.debug('Redirecting to login page, origin: %s' % p)
719 log.debug('Redirecting to login page, origin: %s', p)
720 720 return redirect(url('login_home', came_from=p, **request.GET))
721 721
722 722 class LoginRequired(object):
@@ -738,7 +738,7 b' class LoginRequired(object):'
738 738 controller = fargs[0]
739 739 user = controller.authuser
740 740 loc = "%s:%s" % (controller.__class__.__name__, func.__name__)
741 log.debug('Checking access for user %s @ %s' % (user, loc))
741 log.debug('Checking access for user %s @ %s', user, loc)
742 742
743 743 if not AuthUser.check_ip_allowed(user, controller.ip_addr):
744 744 return redirect_to_login(_('IP %s not allowed') % controller.ip_addr)
@@ -749,15 +749,15 b' class LoginRequired(object):'
749 749 # explicit controller is enabled or API is in our whitelist
750 750 if self.api_access or allowed_api_access(loc, api_key=api_key):
751 751 if api_key in user.api_keys:
752 log.info('user %s authenticated with API key ****%s @ %s'
753 % (user, api_key[-4:], loc))
752 log.info('user %s authenticated with API key ****%s @ %s',
753 user, api_key[-4:], loc)
754 754 return func(*fargs, **fkwargs)
755 755 else:
756 log.warning('API key ****%s is NOT valid' % api_key[-4:])
756 log.warning('API key ****%s is NOT valid', api_key[-4:])
757 757 return redirect_to_login(_('Invalid API key'))
758 758 else:
759 759 # controller does not allow API access
760 log.warning('API access to %s is not allowed' % loc)
760 log.warning('API access to %s is not allowed', loc)
761 761 return abort(403)
762 762
763 763 # CSRF protection - POSTs with session auth must contain correct token
@@ -769,10 +769,10 b' class LoginRequired(object):'
769 769
770 770 # regular user authentication
771 771 if user.is_authenticated:
772 log.info('user %s authenticated with regular auth @ %s' % (user, loc))
772 log.info('user %s authenticated with regular auth @ %s', user, loc)
773 773 return func(*fargs, **fkwargs)
774 774 else:
775 log.warning('user %s NOT authenticated with regular auth @ %s' % (user, loc))
775 log.warning('user %s NOT authenticated with regular auth @ %s', user, loc)
776 776 return redirect_to_login()
777 777
778 778 class NotAnonymous(object):
@@ -787,7 +787,7 b' class NotAnonymous(object):'
787 787 cls = fargs[0]
788 788 self.user = cls.authuser
789 789
790 log.debug('Checking if user is not anonymous @%s' % cls)
790 log.debug('Checking if user is not anonymous @%s', cls)
791 791
792 792 anonymous = self.user.username == User.DEFAULT_USER
793 793
@@ -813,14 +813,14 b' class PermsDecorator(object):'
813 813 self.user = cls.authuser
814 814 self.user_perms = self.user.permissions
815 815 log.debug('checking %s permissions %s for %s %s',
816 self.__class__.__name__, self.required_perms, cls, self.user)
816 self.__class__.__name__, self.required_perms, cls, self.user)
817 817
818 818 if self.check_permissions():
819 log.debug('Permission granted for %s %s' % (cls, self.user))
819 log.debug('Permission granted for %s %s', cls, self.user)
820 820 return func(*fargs, **fkwargs)
821 821
822 822 else:
823 log.debug('Permission denied for %s %s' % (cls, self.user))
823 log.debug('Permission denied for %s %s', cls, self.user)
824 824 anonymous = self.user.username == User.DEFAULT_USER
825 825
826 826 if anonymous:
@@ -1003,15 +1003,15 b' class PermsFunction(object):'
1003 1003 return False
1004 1004 self.user_perms = user.permissions
1005 1005 if self.check_permissions():
1006 log.debug('Permission to %s granted for user: %s @ %s'
1007 % (check_scope, user,
1008 check_location or 'unspecified location'))
1006 log.debug('Permission to %s granted for user: %s @ %s',
1007 check_scope, user,
1008 check_location or 'unspecified location')
1009 1009 return True
1010 1010
1011 1011 else:
1012 log.debug('Permission to %s denied for user: %s @ %s'
1013 % (check_scope, user,
1014 check_location or 'unspecified location'))
1012 log.debug('Permission to %s denied for user: %s @ %s',
1013 check_scope, user,
1014 check_location or 'unspecified location')
1015 1015 return False
1016 1016
1017 1017 def check_permissions(self):
@@ -1163,11 +1163,11 b' class HasPermissionAnyMiddleware(object)'
1163 1163 'permissions %s for user:%s repository:%s', self.user_perms,
1164 1164 self.username, self.repo_name)
1165 1165 if self.required_perms.intersection(self.user_perms):
1166 log.debug('Permission to repo: %s granted for user: %s @ %s'
1167 % (self.repo_name, self.username, 'PermissionMiddleware'))
1166 log.debug('Permission to repo: %s granted for user: %s @ %s',
1167 self.repo_name, self.username, 'PermissionMiddleware')
1168 1168 return True
1169 log.debug('Permission to repo: %s denied for user: %s @ %s'
1170 % (self.repo_name, self.username, 'PermissionMiddleware'))
1169 log.debug('Permission to repo: %s denied for user: %s @ %s',
1170 self.repo_name, self.username, 'PermissionMiddleware')
1171 1171 return False
1172 1172
1173 1173
@@ -1188,8 +1188,8 b' class _BaseApiPerm(object):'
1188 1188 if group_name:
1189 1189 check_scope += ', repo group:%s' % (group_name)
1190 1190
1191 log.debug('checking cls:%s %s %s @ %s'
1192 % (cls_name, self.required_perms, check_scope, check_location))
1191 log.debug('checking cls:%s %s %s @ %s',
1192 cls_name, self.required_perms, check_scope, check_location)
1193 1193 if not user:
1194 1194 log.debug('Empty User passed into arguments')
1195 1195 return False
@@ -1200,13 +1200,13 b' class _BaseApiPerm(object):'
1200 1200 if not check_location:
1201 1201 check_location = 'unspecified'
1202 1202 if self.check_permissions(user.permissions, repo_name, group_name):
1203 log.debug('Permission to %s granted for user: %s @ %s'
1204 % (check_scope, user, check_location))
1203 log.debug('Permission to %s granted for user: %s @ %s',
1204 check_scope, user, check_location)
1205 1205 return True
1206 1206
1207 1207 else:
1208 log.debug('Permission to %s denied for user: %s @ %s'
1209 % (check_scope, user, check_location))
1208 log.debug('Permission to %s denied for user: %s @ %s',
1209 check_scope, user, check_location)
1210 1210 return False
1211 1211
1212 1212 def check_permissions(self, perm_defs, repo_name=None, group_name=None):
@@ -1288,11 +1288,11 b' def check_ip_access(source_ip, allowed_i'
1288 1288 :param allowed_ips: list of allowed ips together with mask
1289 1289 """
1290 1290 from kallithea.lib import ipaddr
1291 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1291 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
1292 1292 if isinstance(allowed_ips, (tuple, list, set)):
1293 1293 for ip in allowed_ips:
1294 1294 if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip):
1295 log.debug('IP %s is network %s' %
1296 (ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip)))
1295 log.debug('IP %s is network %s',
1296 ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip))
1297 1297 return True
1298 1298 return False
@@ -74,7 +74,7 b' class KallitheaAuthPluginBase(object):'
74 74 def __call__(self, *args, **kwargs):
75 75 from kallithea.model import validators as v
76 76 obj = getattr(v, self.validator_name)
77 #log.debug('Initializing lazy formencode object: %s' % obj)
77 #log.debug('Initializing lazy formencode object: %s', obj)
78 78 return LazyFormencode(obj, *args, **kwargs)
79 79
80 80
@@ -120,8 +120,8 b' class KallitheaAuthPluginBase(object):'
120 120 return False
121 121
122 122 if user and user.extern_type and user.extern_type != plugin_name:
123 log.debug('User %s should authenticate using %s this is %s, skipping'
124 % (user, user.extern_type, plugin_name))
123 log.debug('User %s should authenticate using %s this is %s, skipping',
124 user, user.extern_type, plugin_name)
125 125
126 126 return False
127 127 return True
@@ -136,15 +136,15 b' class KallitheaAuthPluginBase(object):'
136 136 :param kwargs: extra arguments needed for user fetching.
137 137 """
138 138 user = None
139 log.debug('Trying to fetch user `%s` from Kallithea database'
140 % (username))
139 log.debug('Trying to fetch user `%s` from Kallithea database',
140 username)
141 141 if username:
142 142 user = User.get_by_username(username)
143 143 if not user:
144 144 log.debug('Fallback to fetch user in case insensitive mode')
145 145 user = User.get_by_username(username, case_insensitive=True)
146 146 else:
147 log.debug('provided username:`%s` is empty skipping...' % username)
147 log.debug('provided username:`%s` is empty skipping...', username)
148 148 return user
149 149
150 150 def settings(self):
@@ -267,8 +267,8 b' class KallitheaExternalAuthPlugin(Kallit'
267 267 # external authentication, but if it's inactive user we shouldn't
268 268 # create that user anyway
269 269 if user_data['active_from_extern'] is False:
270 log.warning("User %s authenticated against %s, but is inactive"
271 % (username, self.__module__))
270 log.warning("User %s authenticated against %s, but is inactive",
271 username, self.__module__)
272 272 return None
273 273
274 274 if self.use_fake_password():
@@ -276,8 +276,8 b' class KallitheaExternalAuthPlugin(Kallit'
276 276 # them blank either
277 277 passwd = PasswordGenerator().gen_password(length=8)
278 278
279 log.debug('Updating or creating user info from %s plugin'
280 % self.name)
279 log.debug('Updating or creating user info from %s plugin',
280 self.name)
281 281 user = UserModel().create_or_update(
282 282 username=username,
283 283 password=passwd,
@@ -309,7 +309,7 b' def importplugin(plugin):'
309 309 TypeError -- if the KallitheaAuthPlugin is not a subclass of ours KallitheaAuthPluginBase
310 310 ImportError -- if we couldn't import the plugin at all
311 311 """
312 log.debug("Importing %s" % plugin)
312 log.debug("Importing %s", plugin)
313 313 if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
314 314 parts = plugin.split(u'.lib.auth_modules.auth_', 1)
315 315 if len(parts) == 2:
@@ -326,8 +326,8 b' def importplugin(plugin):'
326 326 # the plugin name, the crash is pretty bad and hard to recover
327 327 raise
328 328
329 log.debug("Loaded auth plugin from %s (module:%s, file:%s)"
330 % (plugin, module.__name__, module.__file__))
329 log.debug("Loaded auth plugin from %s (module:%s, file:%s)",
330 plugin, module.__name__, module.__file__)
331 331
332 332 pluginclass = getattr(module, PLUGIN_CLASS_NAME)
333 333 if not issubclass(pluginclass, KallitheaAuthPluginBase):
@@ -362,14 +362,14 b' def authenticate(username, password, env'
362 362 """
363 363
364 364 auth_plugins = Setting.get_auth_plugins()
365 log.debug('Authentication against %s plugins' % (auth_plugins,))
365 log.debug('Authentication against %s plugins', auth_plugins)
366 366 for module in auth_plugins:
367 367 try:
368 368 plugin = loadplugin(module)
369 369 except (ImportError, AttributeError, TypeError) as e:
370 370 raise ImportError('Failed to load authentication module %s : %s'
371 371 % (module, str(e)))
372 log.debug('Trying authentication using ** %s **' % (module,))
372 log.debug('Trying authentication using ** %s **', module)
373 373 # load plugin settings from Kallithea database
374 374 plugin_name = plugin.name
375 375 plugin_settings = {}
@@ -377,26 +377,26 b' def authenticate(username, password, env'
377 377 conf_key = "auth_%s_%s" % (plugin_name, v["name"])
378 378 setting = Setting.get_by_name(conf_key)
379 379 plugin_settings[v["name"]] = setting.app_settings_value if setting else None
380 log.debug('Plugin settings \n%s' % formatted_json(plugin_settings))
380 log.debug('Plugin settings \n%s', formatted_json(plugin_settings))
381 381
382 382 if not str2bool(plugin_settings["enabled"]):
383 log.info("Authentication plugin %s is disabled, skipping for %s"
384 % (module, username))
383 log.info("Authentication plugin %s is disabled, skipping for %s",
384 module, username)
385 385 continue
386 386
387 387 # use plugin's method of user extraction.
388 388 user = plugin.get_user(username, environ=environ,
389 389 settings=plugin_settings)
390 log.debug('Plugin %s extracted user is `%s`' % (module, user))
390 log.debug('Plugin %s extracted user is `%s`', module, user)
391 391 if not plugin.accepts(user):
392 log.debug('Plugin %s does not accept user `%s` for authentication'
393 % (module, user))
392 log.debug('Plugin %s does not accept user `%s` for authentication',
393 module, user)
394 394 continue
395 395 else:
396 log.debug('Plugin %s accepted user `%s` for authentication'
397 % (module, user))
396 log.debug('Plugin %s accepted user `%s` for authentication',
397 module, user)
398 398
399 log.info('Authenticating user using %s plugin' % plugin.__module__)
399 log.info('Authenticating user using %s plugin', plugin.__module__)
400 400 # _authenticate is a wrapper for .auth() method of plugin.
401 401 # it checks if .auth() sends proper data. For KallitheaExternalAuthPlugin
402 402 # it also maps users to Database and maps the attributes returned
@@ -405,7 +405,7 b' def authenticate(username, password, env'
405 405 user_data = plugin._authenticate(user, username, password,
406 406 plugin_settings,
407 407 environ=environ or {})
408 log.debug('PLUGIN USER DATA: %s' % user_data)
408 log.debug('PLUGIN USER DATA: %s', user_data)
409 409
410 410 if user_data is not None:
411 411 log.debug('Plugin returned proper authentication data')
@@ -413,8 +413,8 b' def authenticate(username, password, env'
413 413
414 414 # we failed to Auth because .auth() method didn't return the user
415 415 if username:
416 log.warning("User `%s` failed to authenticate against %s"
417 % (username, plugin.__module__))
416 log.warning("User `%s` failed to authenticate against %s",
417 username, plugin.__module__)
418 418 return None
419 419
420 420 def get_managed_fields(user):
@@ -95,24 +95,24 b' class KallitheaAuthPlugin(auth_modules.K'
95 95 username = None
96 96 environ = environ or {}
97 97 if not environ:
98 log.debug('got empty environ: %s' % environ)
98 log.debug('got empty environ: %s', environ)
99 99
100 100 settings = settings or {}
101 101 if settings.get('header'):
102 102 header = settings.get('header')
103 103 username = environ.get(header)
104 log.debug('extracted %s:%s' % (header, username))
104 log.debug('extracted %s:%s', header, username)
105 105
106 106 # fallback mode
107 107 if not username and settings.get('fallback_header'):
108 108 header = settings.get('fallback_header')
109 109 username = environ.get(header)
110 log.debug('extracted %s:%s' % (header, username))
110 log.debug('extracted %s:%s', header, username)
111 111
112 112 if username and str2bool(settings.get('clean_username')):
113 log.debug('Received username %s from container' % username)
113 log.debug('Received username %s from container', username)
114 114 username = self._clean_username(username)
115 log.debug('New cleanup user is: %s' % username)
115 log.debug('New cleanup user is: %s', username)
116 116 return username
117 117
118 118 def get_user(self, username=None, **kwargs):
@@ -188,7 +188,7 b' class KallitheaAuthPlugin(auth_modules.K'
188 188 'extern_name': username,
189 189 }
190 190
191 log.info('user `%s` authenticated correctly' % user_data['username'])
191 log.info('user `%s` authenticated correctly', user_data['username'])
192 192 return user_data
193 193
194 194 def get_managed_fields(self):
@@ -85,9 +85,9 b' class CrowdServer(object):'
85 85 _headers["Authorization"] = "Basic %s" % authstring
86 86 if headers:
87 87 _headers.update(headers)
88 log.debug("Sent crowd: \n%s"
89 % (formatted_json({"url": url, "body": body,
90 "headers": _headers})))
88 log.debug("Sent crowd: \n%s",
89 formatted_json({"url": url, "body": body,
90 "headers": _headers}))
91 91 request = urllib2.Request(url, body, _headers)
92 92 if method:
93 93 request.get_method = lambda: method
@@ -202,16 +202,16 b' class KallitheaAuthPlugin(auth_modules.K'
202 202 log.debug('Empty username or password skipping...')
203 203 return None
204 204
205 log.debug("Crowd settings: \n%s" % (formatted_json(settings)))
205 log.debug("Crowd settings: \n%s", formatted_json(settings))
206 206 server = CrowdServer(**settings)
207 207 server.set_credentials(settings["app_name"], settings["app_password"])
208 208 crowd_user = server.user_auth(username, password)
209 log.debug("Crowd returned: \n%s" % (formatted_json(crowd_user)))
209 log.debug("Crowd returned: \n%s", formatted_json(crowd_user))
210 210 if not crowd_user["status"]:
211 211 return None
212 212
213 213 res = server.user_groups(crowd_user["name"])
214 log.debug("Crowd groups: \n%s" % (formatted_json(res)))
214 log.debug("Crowd groups: \n%s", formatted_json(res))
215 215 crowd_user["groups"] = [x["name"] for x in res["groups"]]
216 216
217 217 # old attrs fetched from Kallithea database
@@ -237,8 +237,8 b' class KallitheaAuthPlugin(auth_modules.K'
237 237 for group in settings["admin_groups"].split(","):
238 238 if group in user_data["groups"]:
239 239 user_data["admin"] = True
240 log.debug("Final crowd user object: \n%s" % (formatted_json(user_data)))
241 log.info('user %s authenticated correctly' % user_data['username'])
240 log.debug("Final crowd user object: \n%s", formatted_json(user_data))
241 log.info('user %s authenticated correctly', user_data['username'])
242 242 return user_data
243 243
244 244 def get_managed_fields(self):
@@ -61,11 +61,11 b' class KallitheaAuthPlugin(auth_modules.K'
61 61
62 62 def auth(self, userobj, username, password, settings, **kwargs):
63 63 if not userobj:
64 log.debug('userobj was:%s skipping' % (userobj, ))
64 log.debug('userobj was:%s skipping', userobj)
65 65 return None
66 66 if userobj.extern_type != self.name:
67 log.warning("userobj:%s extern_type mismatch got:`%s` expected:`%s`"
68 % (userobj, userobj.extern_type, self.name))
67 log.warning("userobj:%s extern_type mismatch got:`%s` expected:`%s`",
68 userobj, userobj.extern_type, self.name)
69 69 return None
70 70
71 71 user_data = {
@@ -85,17 +85,17 b' class KallitheaAuthPlugin(auth_modules.K'
85 85 from kallithea.lib import auth
86 86 password_match = auth.KallitheaCrypto.hash_check(password, userobj.password)
87 87 if userobj.username == User.DEFAULT_USER and userobj.active:
88 log.info('user %s authenticated correctly as anonymous user' %
88 log.info('user %s authenticated correctly as anonymous user',
89 89 username)
90 90 return user_data
91 91
92 92 elif userobj.username == username and password_match:
93 log.info('user %s authenticated correctly' % user_data['username'])
93 log.info('user %s authenticated correctly', user_data['username'])
94 94 return user_data
95 log.error("user %s had a bad password" % username)
95 log.error("user %s had a bad password", username)
96 96 return None
97 97 else:
98 log.warning('user %s tried auth but is disabled' % username)
98 log.warning('user %s tried auth but is disabled', username)
99 99 return None
100 100
101 101 def get_managed_fields(self):
@@ -127,8 +127,8 b' class AuthLdap(object):'
127 127 server.start_tls_s()
128 128
129 129 if self.LDAP_BIND_DN and self.LDAP_BIND_PASS:
130 log.debug('Trying simple_bind with password and given DN: %s'
131 % self.LDAP_BIND_DN)
130 log.debug('Trying simple_bind with password and given DN: %s',
131 self.LDAP_BIND_DN)
132 132 server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS)
133 133
134 134 filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER, self.attr_login,
@@ -146,15 +146,15 b' class AuthLdap(object):'
146 146 continue
147 147
148 148 try:
149 log.debug('Trying simple bind with %s' % dn)
149 log.debug('Trying simple bind with %s', dn)
150 150 server.simple_bind_s(dn, safe_str(password))
151 151 attrs = server.search_ext_s(dn, ldap.SCOPE_BASE,
152 152 '(objectClass=*)')[0][1]
153 153 break
154 154
155 155 except ldap.INVALID_CREDENTIALS:
156 log.debug("LDAP rejected password for user '%s' (%s): %s"
157 % (uid, username, dn))
156 log.debug("LDAP rejected password for user '%s' (%s): %s",
157 uid, username, dn)
158 158
159 159 else:
160 160 log.debug("No matching LDAP objects for authentication "
@@ -162,7 +162,7 b' class AuthLdap(object):'
162 162 raise LdapPasswordError()
163 163
164 164 except ldap.NO_SUCH_OBJECT:
165 log.debug("LDAP says no such user '%s' (%s)" % (uid, username))
165 log.debug("LDAP says no such user '%s' (%s)", uid, username)
166 166 raise LdapUsernameError()
167 167 except ldap.SERVER_DOWN:
168 168 raise LdapConnectionError("LDAP can't access authentication server")
@@ -328,7 +328,7 b' class KallitheaAuthPlugin(auth_modules.K'
328 328 try:
329 329 aldap = AuthLdap(**kwargs)
330 330 (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password)
331 log.debug('Got ldap DN response %s' % user_dn)
331 log.debug('Got ldap DN response %s', user_dn)
332 332
333 333 get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0]
334 334
@@ -350,7 +350,7 b' class KallitheaAuthPlugin(auth_modules.K'
350 350 "active_from_extern": None,
351 351 'extern_name': user_dn,
352 352 }
353 log.info('user %s authenticated correctly' % user_data['username'])
353 log.info('user %s authenticated correctly', user_data['username'])
354 354 return user_data
355 355
356 356 except (LdapUsernameError, LdapPasswordError, LdapImportError):
@@ -98,10 +98,10 b' class KallitheaAuthPlugin(auth_modules.K'
98 98 _pam_lock.release()
99 99
100 100 if not auth_result:
101 log.error("PAM was unable to authenticate user: %s" % (username,))
101 log.error("PAM was unable to authenticate user: %s", username)
102 102 return None
103 103 else:
104 log.debug("Using cached auth for user: %s" % (username,))
104 log.debug("Using cached auth for user: %s", username)
105 105
106 106 # old attrs fetched from Kallithea database
107 107 admin = getattr(userobj, 'admin', False)
@@ -133,8 +133,8 b' class KallitheaAuthPlugin(auth_modules.K'
133 133 log.warning("Cannot extract additional info for PAM user %s", username)
134 134 pass
135 135
136 log.debug("pamuser: \n%s" % formatted_json(user_data))
137 log.info('user %s authenticated correctly' % user_data['username'])
136 log.debug("pamuser: \n%s", formatted_json(user_data))
137 log.info('user %s authenticated correctly', user_data['username'])
138 138 return user_data
139 139
140 140 def get_managed_fields(self):
@@ -74,7 +74,7 b' def _filter_proxy(ip):'
74 74 if ',' in ip:
75 75 _ips = ip.split(',')
76 76 _first_ip = _ips[0].strip()
77 log.debug('Got multiple IPs %s, using %s' % (','.join(_ips), _first_ip))
77 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
78 78 return _first_ip
79 79 return ip
80 80
@@ -224,7 +224,7 b' class BaseVCSController(object):'
224 224 # check IP
225 225 ip_allowed = AuthUser.check_ip_allowed(user, ip_addr)
226 226 if ip_allowed:
227 log.info('Access for IP:%s allowed' % (ip_addr,))
227 log.info('Access for IP:%s allowed', ip_addr)
228 228 else:
229 229 return False
230 230
@@ -256,8 +256,8 b' class BaseVCSController(object):'
256 256 if str2bool(Ui.get_by_key('push_ssl').ui_value):
257 257 org_proto = environ.get('wsgi._org_proto', environ['wsgi.url_scheme'])
258 258 if org_proto != 'https':
259 log.debug('proto is %s and SSL is required BAD REQUEST !'
260 % org_proto)
259 log.debug('proto is %s and SSL is required BAD REQUEST !',
260 org_proto)
261 261 return False
262 262 return True
263 263
@@ -286,7 +286,7 b' class BaseVCSController(object):'
286 286 #check if it's already locked !, if it is compare users
287 287 user_id, _date = repo.locked
288 288 if user.user_id == user_id:
289 log.debug('Got push from user %s, now unlocking' % (user))
289 log.debug('Got push from user %s, now unlocking', user)
290 290 # unlock if we have push from user who locked
291 291 make_lock = False
292 292 else:
@@ -296,13 +296,13 b' class BaseVCSController(object):'
296 296 if repo.locked[0] and repo.locked[1]:
297 297 locked = True
298 298 else:
299 log.debug('Setting lock on repo %s by %s' % (repo, user))
299 log.debug('Setting lock on repo %s by %s', repo, user)
300 300 make_lock = True
301 301
302 302 else:
303 log.debug('Repository %s do not have locking enabled' % (repo))
304 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s'
305 % (make_lock, locked, locked_by))
303 log.debug('Repository %s do not have locking enabled', repo)
304 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
305 make_lock, locked, locked_by)
306 306 return make_lock, locked, locked_by
307 307
308 308 def __call__(self, environ, start_response):
@@ -311,7 +311,7 b' class BaseVCSController(object):'
311 311 return self._handle_request(environ, start_response)
312 312 finally:
313 313 log = logging.getLogger('kallithea.' + self.__class__.__name__)
314 log.debug('Request time: %.3fs' % (time.time() - start))
314 log.debug('Request time: %.3fs', time.time() - start)
315 315 meta.Session.remove()
316 316
317 317
@@ -466,8 +466,8 b' class BaseRepoController(BaseController)'
466 466 if not _dbr:
467 467 return
468 468
469 log.debug('Found repository in database %s with state `%s`'
470 % (safe_unicode(_dbr), safe_unicode(_dbr.repo_state)))
469 log.debug('Found repository in database %s with state `%s`',
470 safe_unicode(_dbr), safe_unicode(_dbr.repo_state))
471 471 route = getattr(request.environ.get('routes.route'), 'name', '')
472 472
473 473 # allow to delete repos that are somehow damages in filesystem
@@ -62,7 +62,7 b' def run_task(task, *args, **kwargs):'
62 62 if CELERY_ON:
63 63 try:
64 64 t = task.apply_async(args=args, kwargs=kwargs)
65 log.info('running task %s:%s' % (t.task_id, task))
65 log.info('running task %s:%s', t.task_id, task)
66 66 return t
67 67
68 68 except socket.error as e:
@@ -76,7 +76,7 b' def run_task(task, *args, **kwargs):'
76 76 except Exception as e:
77 77 log.error(traceback.format_exc())
78 78
79 log.debug('executing task %s in sync mode' % task)
79 log.debug('executing task %s in sync mode', task)
80 80 return ResultWrapper(task(*args, **kwargs))
81 81
82 82
@@ -96,7 +96,7 b' def locked_task(func):'
96 96 lockkey = __get_lockkey(func, *fargs, **fkwargs)
97 97 lockkey_path = config['app_conf']['cache_dir']
98 98
99 log.info('running task with lockkey %s' % lockkey)
99 log.info('running task with lockkey %s', lockkey)
100 100 try:
101 101 l = DaemonLock(file_=jn(lockkey_path, lockkey))
102 102 ret = func(*fargs, **fkwargs)
@@ -87,7 +87,7 b' def get_commits_stats(repo_name, ts_min_'
87 87 ts_max_y)
88 88 lockkey_path = config['app_conf']['cache_dir']
89 89
90 log.info('running task with lockkey %s' % lockkey)
90 log.info('running task with lockkey %s', lockkey)
91 91
92 92 try:
93 93 lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
@@ -134,15 +134,15 b' def get_commits_stats(repo_name, ts_min_'
134 134 cur_stats.commit_activity_combined))
135 135 co_day_auth_aggr = json.loads(cur_stats.commit_activity)
136 136
137 log.debug('starting parsing %s' % parse_limit)
137 log.debug('starting parsing %s', parse_limit)
138 138 lmktime = mktime
139 139
140 140 last_rev = last_rev + 1 if last_rev >= 0 else 0
141 log.debug('Getting revisions from %s to %s' % (
142 last_rev, last_rev + parse_limit)
141 log.debug('Getting revisions from %s to %s',
142 last_rev, last_rev + parse_limit
143 143 )
144 144 for cs in repo[last_rev:last_rev + parse_limit]:
145 log.debug('parsing %s' % cs)
145 log.debug('parsing %s', cs)
146 146 last_cs = cs # remember last parsed changeset
147 147 k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
148 148 cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
@@ -210,9 +210,9 b' def get_commits_stats(repo_name, ts_min_'
210 210 stats.commit_activity = json.dumps(co_day_auth_aggr)
211 211 stats.commit_activity_combined = json.dumps(overview_data)
212 212
213 log.debug('last revision %s' % last_rev)
213 log.debug('last revision %s', last_rev)
214 214 leftovers = len(repo.revisions[last_rev:])
215 log.debug('revisions to parse %s' % leftovers)
215 log.debug('revisions to parse %s', leftovers)
216 216
217 217 if last_rev == 0 or leftovers < parse_limit:
218 218 log.debug('getting code trending stats')
@@ -103,7 +103,7 b' class DbManage(object):'
103 103
104 104 checkfirst = not override
105 105 Base.metadata.create_all(checkfirst=checkfirst)
106 log.info('Created tables for %s' % self.dbname)
106 log.info('Created tables for %s', self.dbname)
107 107
108 108 def set_db_version(self):
109 109 ver = DbMigrateVersion()
@@ -111,7 +111,7 b' class DbManage(object):'
111 111 ver.repository_id = DB_MIGRATIONS
112 112 ver.repository_path = 'versions'
113 113 self.sa.add(ver)
114 log.info('db version set to: %s' % __dbversion__)
114 log.info('db version set to: %s', __dbversion__)
115 115
116 116 def upgrade(self):
117 117 """
@@ -363,7 +363,7 b' class DbManage(object):'
363 363 for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
364 364 ('auth_internal_enabled', 'True', 'bool')]:
365 365 if skip_existing and Setting.get_by_name(k) != None:
366 log.debug('Skipping option %s' % k)
366 log.debug('Skipping option %s', k)
367 367 continue
368 368 setting = Setting(k, v, t)
369 369 self.sa.add(setting)
@@ -379,7 +379,7 b' class DbManage(object):'
379 379 ('default_repo_type', 'hg', 'unicode')]:
380 380
381 381 if skip_existing and Setting.get_by_name(k) is not None:
382 log.debug('Skipping option %s' % k)
382 log.debug('Skipping option %s', k)
383 383 continue
384 384 setting = Setting(k, v, t)
385 385 self.sa.add(setting)
@@ -396,7 +396,7 b' class DbManage(object):'
396 396 .scalar()
397 397
398 398 if default is None:
399 log.debug('missing default permission for group %s adding' % g)
399 log.debug('missing default permission for group %s adding', g)
400 400 perm_obj = RepoGroupModel()._create_default_perms(g)
401 401 self.sa.add(perm_obj)
402 402
@@ -444,20 +444,20 b' class DbManage(object):'
444 444 # check proper dir
445 445 if not os.path.isdir(path):
446 446 path_ok = False
447 log.error('Given path %s is not a valid directory' % (path,))
447 log.error('Given path %s is not a valid directory', path)
448 448
449 449 elif not os.path.isabs(path):
450 450 path_ok = False
451 log.error('Given path %s is not an absolute path' % (path,))
451 log.error('Given path %s is not an absolute path', path)
452 452
453 453 # check if path is at least readable.
454 454 if not os.access(path, os.R_OK):
455 455 path_ok = False
456 log.error('Given path %s is not readable' % (path,))
456 log.error('Given path %s is not readable', path)
457 457
458 458 # check write access, warn user about non writeable paths
459 459 elif not os.access(path, os.W_OK) and path_ok:
460 log.warning('No write permission to given path %s' % (path,))
460 log.warning('No write permission to given path %s', path)
461 461 if not ask_ok('Given path %s is not writeable, do you want to '
462 462 'continue with read only mode ? [y/n]' % (path,)):
463 463 log.error('Canceled by user')
@@ -472,7 +472,7 b' class DbManage(object):'
472 472 real_path = os.path.normpath(os.path.realpath(path))
473 473
474 474 if real_path != os.path.normpath(path):
475 log.warning('Using normalized path %s instead of %s' % (real_path, path))
475 log.warning('Using normalized path %s instead of %s', real_path, path)
476 476
477 477 return real_path
478 478
@@ -520,7 +520,7 b' class DbManage(object):'
520 520 log.info('created ui config')
521 521
522 522 def create_user(self, username, password, email='', admin=False):
523 log.info('creating user %s' % username)
523 log.info('creating user %s', username)
524 524 UserModel().create_or_update(username, password, email,
525 525 firstname='Kallithea', lastname='Admin',
526 526 active=True, admin=admin,
@@ -25,13 +25,13 b' def usage():'
25 25
26 26 def delete_file(filepath):
27 27 """Deletes a file and prints a message."""
28 log.info('Deleting file: %s' % filepath)
28 log.info('Deleting file: %s', filepath)
29 29 os.remove(filepath)
30 30
31 31
32 32 def move_file(src, tgt):
33 33 """Moves a file and prints a message."""
34 log.info('Moving file %s to %s' % (src, tgt))
34 log.info('Moving file %s to %s', src, tgt)
35 35 if os.path.exists(tgt):
36 36 raise Exception(
37 37 'Cannot move file %s because target %s already exists' % \
@@ -41,13 +41,13 b' def move_file(src, tgt):'
41 41
42 42 def delete_directory(dirpath):
43 43 """Delete a directory and print a message."""
44 log.info('Deleting directory: %s' % dirpath)
44 log.info('Deleting directory: %s', dirpath)
45 45 os.rmdir(dirpath)
46 46
47 47
48 48 def migrate_repository(repos):
49 49 """Does the actual migration to the new repository format."""
50 log.info('Migrating repository at: %s to new format' % repos)
50 log.info('Migrating repository at: %s to new format', repos)
51 51 versions = '%s/versions' % repos
52 52 dirs = os.listdir(versions)
53 53 # Only use int's in list.
@@ -55,7 +55,7 b' def migrate_repository(repos):'
55 55 numdirs.sort() # Sort list.
56 56 for dirname in numdirs:
57 57 origdir = '%s/%s' % (versions, dirname)
58 log.info('Working on directory: %s' % origdir)
58 log.info('Working on directory: %s', origdir)
59 59 files = os.listdir(origdir)
60 60 files.sort()
61 61 for filename in files:
@@ -35,7 +35,7 b' class Pathed(KeyedInstance):'
35 35 """Try to initialize this object's parent, if it has one"""
36 36 parent_path = self.__class__._parent_path(path)
37 37 self.parent = self.__class__.parent(parent_path)
38 log.debug("Getting parent %r:%r" % (self.__class__.parent, parent_path))
38 log.debug("Getting parent %r:%r", self.__class__.parent, parent_path)
39 39 self.parent._init_child(path, self)
40 40
41 41 def _init_child(self, child, path):
@@ -74,14 +74,14 b' class Repository(pathed.Pathed):'
74 74 _versions = 'versions'
75 75
76 76 def __init__(self, path):
77 log.debug('Loading repository %s...' % path)
77 log.debug('Loading repository %s...', path)
78 78 self.verify(path)
79 79 super(Repository, self).__init__(path)
80 80 self.config = cfgparse.Config(os.path.join(self.path, self._config))
81 81 self.versions = version.Collection(os.path.join(self.path,
82 82 self._versions))
83 log.debug('Repository %s loaded successfully' % path)
84 log.debug('Config: %r' % self.config.to_dict())
83 log.debug('Repository %s loaded successfully', path)
84 log.debug('Config: %r', self.config.to_dict())
85 85
86 86 @classmethod
87 87 def verify(cls, path):
@@ -24,10 +24,10 b' class BaseScript(pathed.Pathed):'
24 24 """ # TODO: sphinxfy this and implement it correctly
25 25
26 26 def __init__(self, path):
27 log.debug('Loading script %s...' % path)
27 log.debug('Loading script %s...', path)
28 28 self.verify(path)
29 29 super(BaseScript, self).__init__(path)
30 log.debug('Script %s loaded successfully' % path)
30 log.debug('Script %s loaded successfully', path)
31 31
32 32 @classmethod
33 33 def verify(cls, path):
@@ -323,7 +323,7 b' class User(Base, BaseModel):'
323 323 self.last_login = datetime.datetime.now()
324 324 Session.add(self)
325 325 Session.commit()
326 log.debug('updated user %s lastlogin' % self.username)
326 log.debug('updated user %s lastlogin', self.username)
327 327
328 328 @classmethod
329 329 def create(cls, form_data):
@@ -698,7 +698,7 b' class Repository(Base, BaseModel):'
698 698
699 699 try:
700 700 alias = get_scm(repo_full_path)[0]
701 log.debug('Creating instance of %s repository' % alias)
701 log.debug('Creating instance of %s repository', alias)
702 702 backend = get_backend(alias)
703 703 except VCSError:
704 704 log.error(traceback.format_exc())
@@ -785,7 +785,7 b' class Group(Base, BaseModel):'
785 785 break
786 786 if cnt == parents_recursion_limit:
787 787 # this will prevent accidental infinite loops
788 log.error('group nested more than %s' %
788 log.error('group nested more than %s',
789 789 parents_recursion_limit)
790 790 break
791 791
@@ -1060,7 +1060,7 b' class CacheInvalidation(Base, BaseModel)'
1060 1060 :param key:
1061 1061 """
1062 1062
1063 log.debug('marking %s for invalidation' % key)
1063 log.debug('marking %s for invalidation', key)
1064 1064 inv_obj = Session.query(cls)\
1065 1065 .filter(cls.cache_key == key).scalar()
1066 1066 if inv_obj:
@@ -391,7 +391,7 b' class User(Base, BaseModel):'
391 391 """Update user lastlogin"""
392 392 self.last_login = datetime.datetime.now()
393 393 Session.add(self)
394 log.debug('updated user %s lastlogin' % self.username)
394 log.debug('updated user %s lastlogin', self.username)
395 395
396 396 def __json__(self):
397 397 return dict(
@@ -716,7 +716,7 b' class Repository(Base, BaseModel):'
716 716 repo_full_path = self.repo_full_path
717 717 try:
718 718 alias = get_scm(repo_full_path)[0]
719 log.debug('Creating instance of %s repository' % alias)
719 log.debug('Creating instance of %s repository', alias)
720 720 backend = get_backend(alias)
721 721 except VCSError:
722 722 log.error(traceback.format_exc())
@@ -813,7 +813,7 b' class RepoGroup(Base, BaseModel):'
813 813 break
814 814 if cnt == parents_recursion_limit:
815 815 # this will prevent accidental infinite loops
816 log.error('group nested more than %s' %
816 log.error('group nested more than %s',
817 817 parents_recursion_limit)
818 818 break
819 819
@@ -1151,8 +1151,8 b' class CacheInvalidation(Base, BaseModel)'
1151 1151
1152 1152 key, _prefix, _org_key = cls._get_key(key)
1153 1153 inv_objs = Session.query(cls).filter(cls.cache_args == _org_key).all()
1154 log.debug('marking %s key[s] %s for invalidation' % (len(inv_objs),
1155 _org_key))
1154 log.debug('marking %s key[s] %s for invalidation', len(inv_objs),
1155 _org_key)
1156 1156 try:
1157 1157 for inv_obj in inv_objs:
1158 1158 if inv_obj:
@@ -437,7 +437,7 b' class User(Base, BaseModel):'
437 437 """Update user lastlogin"""
438 438 self.last_login = datetime.datetime.now()
439 439 Session().add(self)
440 log.debug('updated user %s lastlogin' % self.username)
440 log.debug('updated user %s lastlogin', self.username)
441 441
442 442 def get_api_data(self):
443 443 """
@@ -863,7 +863,7 b' class Repository(Base, BaseModel):'
863 863 if last_change is None:
864 864 last_change = datetime.datetime.now()
865 865 if self.updated_on is None or self.updated_on != last_change:
866 log.debug('updated repo %s with new date %s' % (self, last_change))
866 log.debug('updated repo %s with new date %s', self, last_change)
867 867 self.updated_on = last_change
868 868 Session().add(self)
869 869 Session().commit()
@@ -977,7 +977,7 b' class Repository(Base, BaseModel):'
977 977 repo_full_path = self.repo_full_path
978 978 try:
979 979 alias = get_scm(repo_full_path)[0]
980 log.debug('Creating instance of %s repository' % alias)
980 log.debug('Creating instance of %s repository', alias)
981 981 backend = get_backend(alias)
982 982 except VCSError:
983 983 log.error(traceback.format_exc())
@@ -1081,7 +1081,7 b' class RepoGroup(Base, BaseModel):'
1081 1081 break
1082 1082 if cnt == parents_recursion_limit:
1083 1083 # this will prevent accidental infinite loops
1084 log.error('group nested more than %s' %
1084 log.error('group nested more than %s',
1085 1085 parents_recursion_limit)
1086 1086 break
1087 1087
@@ -1505,8 +1505,8 b' class CacheInvalidation(Base, BaseModel)'
1505 1505 elif repo_name:
1506 1506 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
1507 1507
1508 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s'
1509 % (len(inv_objs), key, repo_name))
1508 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
1509 len(inv_objs), key, repo_name)
1510 1510 try:
1511 1511 for inv_obj in inv_objs:
1512 1512 inv_obj.cache_active = False
@@ -455,7 +455,7 b' class User(Base, BaseModel):'
455 455 """Update user lastlogin"""
456 456 self.last_login = datetime.datetime.now()
457 457 Session().add(self)
458 log.debug('updated user %s lastlogin' % self.username)
458 log.debug('updated user %s lastlogin', self.username)
459 459
460 460 def get_api_data(self):
461 461 """
@@ -882,7 +882,7 b' class Repository(Base, BaseModel):'
882 882 if last_change is None:
883 883 last_change = datetime.datetime.now()
884 884 if self.updated_on is None or self.updated_on != last_change:
885 log.debug('updated repo %s with new date %s' % (self, last_change))
885 log.debug('updated repo %s with new date %s', self, last_change)
886 886 self.updated_on = last_change
887 887 Session().add(self)
888 888 Session().commit()
@@ -996,7 +996,7 b' class Repository(Base, BaseModel):'
996 996 repo_full_path = self.repo_full_path
997 997 try:
998 998 alias = get_scm(repo_full_path)[0]
999 log.debug('Creating instance of %s repository' % alias)
999 log.debug('Creating instance of %s repository', alias)
1000 1000 backend = get_backend(alias)
1001 1001 except VCSError:
1002 1002 log.error(traceback.format_exc())
@@ -1100,7 +1100,7 b' class RepoGroup(Base, BaseModel):'
1100 1100 break
1101 1101 if cnt == parents_recursion_limit:
1102 1102 # this will prevent accidental infinite loops
1103 log.error('group nested more than %s' %
1103 log.error('group nested more than %s',
1104 1104 parents_recursion_limit)
1105 1105 break
1106 1106
@@ -1532,8 +1532,8 b' class CacheInvalidation(Base, BaseModel)'
1532 1532 elif repo_name:
1533 1533 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
1534 1534
1535 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s'
1536 % (len(inv_objs), key, repo_name))
1535 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
1536 len(inv_objs), key, repo_name)
1537 1537 try:
1538 1538 for inv_obj in inv_objs:
1539 1539 inv_obj.cache_active = False
@@ -462,7 +462,7 b' class User(Base, BaseModel):'
462 462 """Update user lastlogin"""
463 463 self.last_login = datetime.datetime.now()
464 464 Session().add(self)
465 log.debug('updated user %s lastlogin' % self.username)
465 log.debug('updated user %s lastlogin', self.username)
466 466
467 467 def get_api_data(self):
468 468 """
@@ -995,15 +995,15 b' class Repository(Base, BaseModel):'
995 995 if (cs_cache != self.changeset_cache or not self.changeset_cache):
996 996 _default = datetime.datetime.fromtimestamp(0)
997 997 last_change = cs_cache.get('date') or _default
998 log.debug('updated repo %s with new cs cache %s'
999 % (self.repo_name, cs_cache))
998 log.debug('updated repo %s with new cs cache %s',
999 self.repo_name, cs_cache)
1000 1000 self.updated_on = last_change
1001 1001 self.changeset_cache = cs_cache
1002 1002 Session().add(self)
1003 1003 Session().commit()
1004 1004 else:
1005 log.debug('Skipping repo:%s already with latest changes'
1006 % self.repo_name)
1005 log.debug('Skipping repo:%s already with latest changes',
1006 self.repo_name)
1007 1007
1008 1008 @property
1009 1009 def tip(self):
@@ -1117,7 +1117,7 b' class Repository(Base, BaseModel):'
1117 1117 repo_full_path = self.repo_full_path
1118 1118 try:
1119 1119 alias = get_scm(repo_full_path)[0]
1120 log.debug('Creating instance of %s repository' % alias)
1120 log.debug('Creating instance of %s repository', alias)
1121 1121 backend = get_backend(alias)
1122 1122 except VCSError:
1123 1123 log.error(traceback.format_exc())
@@ -1221,7 +1221,7 b' class RepoGroup(Base, BaseModel):'
1221 1221 break
1222 1222 if cnt == parents_recursion_limit:
1223 1223 # this will prevent accidental infinite loops
1224 log.error('group nested more than %s' %
1224 log.error('group nested more than %s',
1225 1225 parents_recursion_limit)
1226 1226 break
1227 1227
@@ -1645,8 +1645,8 b' class CacheInvalidation(Base, BaseModel)'
1645 1645 elif repo_name:
1646 1646 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
1647 1647
1648 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s'
1649 % (len(inv_objs), key, repo_name))
1648 log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s',
1649 len(inv_objs), key, repo_name)
1650 1650 try:
1651 1651 for inv_obj in inv_objs:
1652 1652 inv_obj.cache_active = False
@@ -491,7 +491,7 b' class User(Base, BaseModel):'
491 491 """Update user lastlogin"""
492 492 self.last_login = datetime.datetime.now()
493 493 Session().add(self)
494 log.debug('updated user %s lastlogin' % self.username)
494 log.debug('updated user %s lastlogin', self.username)
495 495
496 496 def get_api_data(self):
497 497 """
@@ -1065,15 +1065,15 b' class Repository(Base, BaseModel):'
1065 1065 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1066 1066 _default = datetime.datetime.fromtimestamp(0)
1067 1067 last_change = cs_cache.get('date') or _default
1068 log.debug('updated repo %s with new cs cache %s'
1069 % (self.repo_name, cs_cache))
1068 log.debug('updated repo %s with new cs cache %s',
1069 self.repo_name, cs_cache)
1070 1070 self.updated_on = last_change
1071 1071 self.changeset_cache = cs_cache
1072 1072 Session().add(self)
1073 1073 Session().commit()
1074 1074 else:
1075 log.debug('Skipping repo:%s already with latest changes'
1076 % self.repo_name)
1075 log.debug('Skipping repo:%s already with latest changes',
1076 self.repo_name)
1077 1077
1078 1078 @property
1079 1079 def tip(self):
@@ -1192,8 +1192,8 b' class Repository(Base, BaseModel):'
1192 1192 repo_full_path = self.repo_full_path
1193 1193 try:
1194 1194 alias = get_scm(repo_full_path)[0]
1195 log.debug('Creating instance of %s repository from %s'
1196 % (alias, repo_full_path))
1195 log.debug('Creating instance of %s repository from %s',
1196 alias, repo_full_path)
1197 1197 backend = get_backend(alias)
1198 1198 except VCSError:
1199 1199 log.error(traceback.format_exc())
@@ -1296,7 +1296,7 b' class RepoGroup(Base, BaseModel):'
1296 1296 break
1297 1297 if cnt == parents_recursion_limit:
1298 1298 # this will prevent accidental infinite loops
1299 log.error('group nested more than %s' %
1299 log.error('group nested more than %s',
1300 1300 parents_recursion_limit)
1301 1301 break
1302 1302
@@ -1723,8 +1723,8 b' class CacheInvalidation(Base, BaseModel)'
1723 1723 try:
1724 1724 for inv_obj in inv_objs:
1725 1725 inv_obj.cache_active = False
1726 log.debug('marking %s key for invalidation based on key=%s,repo_name=%s'
1727 % (inv_obj, key, safe_str(repo_name)))
1726 log.debug('marking %s key for invalidation based on key=%s,repo_name=%s',
1727 inv_obj, key, safe_str(repo_name))
1728 1728 invalidated_keys.append(inv_obj.cache_key)
1729 1729 Session().add(inv_obj)
1730 1730 Session().commit()
@@ -493,7 +493,7 b' class User(Base, BaseModel):'
493 493 """Update user lastlogin"""
494 494 self.last_login = datetime.datetime.now()
495 495 Session().add(self)
496 log.debug('updated user %s lastlogin' % self.username)
496 log.debug('updated user %s lastlogin', self.username)
497 497
498 498 @classmethod
499 499 def get_first_admin(cls):
@@ -1096,15 +1096,15 b' class Repository(Base, BaseModel):'
1096 1096 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1097 1097 _default = datetime.datetime.fromtimestamp(0)
1098 1098 last_change = cs_cache.get('date') or _default
1099 log.debug('updated repo %s with new cs cache %s'
1100 % (self.repo_name, cs_cache))
1099 log.debug('updated repo %s with new cs cache %s',
1100 self.repo_name, cs_cache)
1101 1101 self.updated_on = last_change
1102 1102 self.changeset_cache = cs_cache
1103 1103 Session().add(self)
1104 1104 Session().commit()
1105 1105 else:
1106 log.debug('Skipping repo:%s already with latest changes'
1107 % self.repo_name)
1106 log.debug('Skipping repo:%s already with latest changes',
1107 self.repo_name)
1108 1108
1109 1109 @property
1110 1110 def tip(self):
@@ -1199,18 +1199,18 b' class Repository(Base, BaseModel):'
1199 1199
1200 1200 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1201 1201 if not valid:
1202 log.debug('Cache for %s invalidated, getting new object' % (rn))
1202 log.debug('Cache for %s invalidated, getting new object', rn)
1203 1203 region_invalidate(_c, None, rn)
1204 1204 else:
1205 log.debug('Getting obj for %s from cache' % (rn))
1205 log.debug('Getting obj for %s from cache', rn)
1206 1206 return _c(rn)
1207 1207
1208 1208 def __get_instance(self):
1209 1209 repo_full_path = self.repo_full_path
1210 1210 try:
1211 1211 alias = get_scm(repo_full_path)[0]
1212 log.debug('Creating instance of %s repository from %s'
1213 % (alias, repo_full_path))
1212 log.debug('Creating instance of %s repository from %s',
1213 alias, repo_full_path)
1214 1214 backend = get_backend(alias)
1215 1215 except VCSError:
1216 1216 log.error(traceback.format_exc())
@@ -1314,7 +1314,7 b' class RepoGroup(Base, BaseModel):'
1314 1314 break
1315 1315 if cnt == parents_recursion_limit:
1316 1316 # this will prevent accidental infinite loops
1317 log.error('group nested more than %s' %
1317 log.error('group nested more than %s',
1318 1318 parents_recursion_limit)
1319 1319 break
1320 1320
@@ -1828,8 +1828,8 b' class CacheInvalidation(Base, BaseModel)'
1828 1828
1829 1829 try:
1830 1830 for inv_obj in inv_objs:
1831 log.debug('marking %s key for invalidation based on repo_name=%s'
1832 % (inv_obj, safe_str(repo_name)))
1831 log.debug('marking %s key for invalidation based on repo_name=%s',
1832 inv_obj, safe_str(repo_name))
1833 1833 inv_obj.cache_active = False
1834 1834 Session().add(inv_obj)
1835 1835 Session().commit()
@@ -516,7 +516,7 b' class User(Base, BaseModel):'
516 516 """Update user lastlogin"""
517 517 self.last_login = datetime.datetime.now()
518 518 Session().add(self)
519 log.debug('updated user %s lastlogin' % self.username)
519 log.debug('updated user %s lastlogin', self.username)
520 520
521 521 @classmethod
522 522 def get_first_admin(cls):
@@ -1126,15 +1126,15 b' class Repository(Base, BaseModel):'
1126 1126 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1127 1127 _default = datetime.datetime.fromtimestamp(0)
1128 1128 last_change = cs_cache.get('date') or _default
1129 log.debug('updated repo %s with new cs cache %s'
1130 % (self.repo_name, cs_cache))
1129 log.debug('updated repo %s with new cs cache %s',
1130 self.repo_name, cs_cache)
1131 1131 self.updated_on = last_change
1132 1132 self.changeset_cache = cs_cache
1133 1133 Session().add(self)
1134 1134 Session().commit()
1135 1135 else:
1136 log.debug('Skipping repo:%s already with latest changes'
1137 % self.repo_name)
1136 log.debug('Skipping repo:%s already with latest changes',
1137 self.repo_name)
1138 1138
1139 1139 @property
1140 1140 def tip(self):
@@ -1229,18 +1229,18 b' class Repository(Base, BaseModel):'
1229 1229
1230 1230 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1231 1231 if not valid:
1232 log.debug('Cache for %s invalidated, getting new object' % (rn))
1232 log.debug('Cache for %s invalidated, getting new object', rn)
1233 1233 region_invalidate(_c, None, rn)
1234 1234 else:
1235 log.debug('Getting obj for %s from cache' % (rn))
1235 log.debug('Getting obj for %s from cache', rn)
1236 1236 return _c(rn)
1237 1237
1238 1238 def __get_instance(self):
1239 1239 repo_full_path = self.repo_full_path
1240 1240 try:
1241 1241 alias = get_scm(repo_full_path)[0]
1242 log.debug('Creating instance of %s repository from %s'
1243 % (alias, repo_full_path))
1242 log.debug('Creating instance of %s repository from %s',
1243 alias, repo_full_path)
1244 1244 backend = get_backend(alias)
1245 1245 except VCSError:
1246 1246 log.error(traceback.format_exc())
@@ -1344,7 +1344,7 b' class RepoGroup(Base, BaseModel):'
1344 1344 break
1345 1345 if cnt == parents_recursion_limit:
1346 1346 # this will prevent accidental infinite loops
1347 log.error('group nested more than %s' %
1347 log.error('group nested more than %s',
1348 1348 parents_recursion_limit)
1349 1349 break
1350 1350
@@ -1874,8 +1874,8 b' class CacheInvalidation(Base, BaseModel)'
1874 1874
1875 1875 try:
1876 1876 for inv_obj in inv_objs:
1877 log.debug('marking %s key for invalidation based on repo_name=%s'
1878 % (inv_obj, safe_str(repo_name)))
1877 log.debug('marking %s key for invalidation based on repo_name=%s',
1878 inv_obj, safe_str(repo_name))
1879 1879 inv_obj.cache_active = False
1880 1880 Session().add(inv_obj)
1881 1881 Session().commit()
@@ -568,7 +568,7 b' class User(Base, BaseModel):'
568 568 """Update user lastlogin"""
569 569 self.last_login = datetime.datetime.now()
570 570 Session().add(self)
571 log.debug('updated user %s lastlogin' % self.username)
571 log.debug('updated user %s lastlogin', self.username)
572 572
573 573 @classmethod
574 574 def get_first_admin(cls):
@@ -1183,15 +1183,15 b' class Repository(Base, BaseModel):'
1183 1183 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1184 1184 _default = datetime.datetime.fromtimestamp(0)
1185 1185 last_change = cs_cache.get('date') or _default
1186 log.debug('updated repo %s with new cs cache %s'
1187 % (self.repo_name, cs_cache))
1186 log.debug('updated repo %s with new cs cache %s',
1187 self.repo_name, cs_cache)
1188 1188 self.updated_on = last_change
1189 1189 self.changeset_cache = cs_cache
1190 1190 Session().add(self)
1191 1191 Session().commit()
1192 1192 else:
1193 log.debug('Skipping repo:%s already with latest changes'
1194 % self.repo_name)
1193 log.debug('Skipping repo:%s already with latest changes',
1194 self.repo_name)
1195 1195
1196 1196 @property
1197 1197 def tip(self):
@@ -1286,18 +1286,18 b' class Repository(Base, BaseModel):'
1286 1286
1287 1287 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1288 1288 if not valid:
1289 log.debug('Cache for %s invalidated, getting new object' % (rn))
1289 log.debug('Cache for %s invalidated, getting new object', rn)
1290 1290 region_invalidate(_c, None, rn)
1291 1291 else:
1292 log.debug('Getting obj for %s from cache' % (rn))
1292 log.debug('Getting obj for %s from cache', rn)
1293 1293 return _c(rn)
1294 1294
1295 1295 def __get_instance(self):
1296 1296 repo_full_path = self.repo_full_path
1297 1297 try:
1298 1298 alias = get_scm(repo_full_path)[0]
1299 log.debug('Creating instance of %s repository from %s'
1300 % (alias, repo_full_path))
1299 log.debug('Creating instance of %s repository from %s',
1300 alias, repo_full_path)
1301 1301 backend = get_backend(alias)
1302 1302 except VCSError:
1303 1303 log.error(traceback.format_exc())
@@ -1401,7 +1401,7 b' class RepoGroup(Base, BaseModel):'
1401 1401 break
1402 1402 if cnt == parents_recursion_limit:
1403 1403 # this will prevent accidental infinite loops
1404 log.error('group nested more than %s' %
1404 log.error('group nested more than %s',
1405 1405 parents_recursion_limit)
1406 1406 break
1407 1407
@@ -1931,8 +1931,8 b' class CacheInvalidation(Base, BaseModel)'
1931 1931
1932 1932 try:
1933 1933 for inv_obj in inv_objs:
1934 log.debug('marking %s key for invalidation based on repo_name=%s'
1935 % (inv_obj, safe_str(repo_name)))
1934 log.debug('marking %s key for invalidation based on repo_name=%s',
1935 inv_obj, safe_str(repo_name))
1936 1936 if delete:
1937 1937 Session().delete(inv_obj)
1938 1938 else:
@@ -568,7 +568,7 b' class User(Base, BaseModel):'
568 568 """Update user lastlogin"""
569 569 self.last_login = datetime.datetime.now()
570 570 Session().add(self)
571 log.debug('updated user %s lastlogin' % self.username)
571 log.debug('updated user %s lastlogin', self.username)
572 572
573 573 @classmethod
574 574 def get_first_admin(cls):
@@ -1182,15 +1182,15 b' class Repository(Base, BaseModel):'
1182 1182 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1183 1183 _default = datetime.datetime.fromtimestamp(0)
1184 1184 last_change = cs_cache.get('date') or _default
1185 log.debug('updated repo %s with new cs cache %s'
1186 % (self.repo_name, cs_cache))
1185 log.debug('updated repo %s with new cs cache %s',
1186 self.repo_name, cs_cache)
1187 1187 self.updated_on = last_change
1188 1188 self.changeset_cache = cs_cache
1189 1189 Session().add(self)
1190 1190 Session().commit()
1191 1191 else:
1192 log.debug('Skipping repo:%s already with latest changes'
1193 % self.repo_name)
1192 log.debug('Skipping repo:%s already with latest changes',
1193 self.repo_name)
1194 1194
1195 1195 @property
1196 1196 def tip(self):
@@ -1285,18 +1285,18 b' class Repository(Base, BaseModel):'
1285 1285
1286 1286 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1287 1287 if not valid:
1288 log.debug('Cache for %s invalidated, getting new object' % (rn))
1288 log.debug('Cache for %s invalidated, getting new object', rn)
1289 1289 region_invalidate(_c, None, rn)
1290 1290 else:
1291 log.debug('Getting obj for %s from cache' % (rn))
1291 log.debug('Getting obj for %s from cache', rn)
1292 1292 return _c(rn)
1293 1293
1294 1294 def __get_instance(self):
1295 1295 repo_full_path = self.repo_full_path
1296 1296 try:
1297 1297 alias = get_scm(repo_full_path)[0]
1298 log.debug('Creating instance of %s repository from %s'
1299 % (alias, repo_full_path))
1298 log.debug('Creating instance of %s repository from %s',
1299 alias, repo_full_path)
1300 1300 backend = get_backend(alias)
1301 1301 except VCSError:
1302 1302 log.error(traceback.format_exc())
@@ -1402,7 +1402,7 b' class RepoGroup(Base, BaseModel):'
1402 1402 break
1403 1403 if cnt == parents_recursion_limit:
1404 1404 # this will prevent accidental infinite loops
1405 log.error('group nested more than %s' %
1405 log.error('group nested more than %s',
1406 1406 parents_recursion_limit)
1407 1407 break
1408 1408
@@ -1932,8 +1932,8 b' class CacheInvalidation(Base, BaseModel)'
1932 1932
1933 1933 try:
1934 1934 for inv_obj in inv_objs:
1935 log.debug('marking %s key for invalidation based on repo_name=%s'
1936 % (inv_obj, safe_str(repo_name)))
1935 log.debug('marking %s key for invalidation based on repo_name=%s',
1936 inv_obj, safe_str(repo_name))
1937 1937 if delete:
1938 1938 Session().delete(inv_obj)
1939 1939 else:
@@ -568,7 +568,7 b' class User(Base, BaseModel):'
568 568 """Update user lastlogin"""
569 569 self.last_login = datetime.datetime.now()
570 570 Session().add(self)
571 log.debug('updated user %s lastlogin' % self.username)
571 log.debug('updated user %s lastlogin', self.username)
572 572
573 573 @classmethod
574 574 def get_first_admin(cls):
@@ -1202,15 +1202,15 b' class Repository(Base, BaseModel):'
1202 1202 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1203 1203 _default = datetime.datetime.fromtimestamp(0)
1204 1204 last_change = cs_cache.get('date') or _default
1205 log.debug('updated repo %s with new cs cache %s'
1206 % (self.repo_name, cs_cache))
1205 log.debug('updated repo %s with new cs cache %s',
1206 self.repo_name, cs_cache)
1207 1207 self.updated_on = last_change
1208 1208 self.changeset_cache = cs_cache
1209 1209 Session().add(self)
1210 1210 Session().commit()
1211 1211 else:
1212 log.debug('Skipping repo:%s already with latest changes'
1213 % self.repo_name)
1212 log.debug('Skipping repo:%s already with latest changes',
1213 self.repo_name)
1214 1214
1215 1215 @property
1216 1216 def tip(self):
@@ -1305,18 +1305,18 b' class Repository(Base, BaseModel):'
1305 1305
1306 1306 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1307 1307 if not valid:
1308 log.debug('Cache for %s invalidated, getting new object' % (rn))
1308 log.debug('Cache for %s invalidated, getting new object', rn)
1309 1309 region_invalidate(_c, None, rn)
1310 1310 else:
1311 log.debug('Getting obj for %s from cache' % (rn))
1311 log.debug('Getting obj for %s from cache', rn)
1312 1312 return _c(rn)
1313 1313
1314 1314 def __get_instance(self):
1315 1315 repo_full_path = self.repo_full_path
1316 1316 try:
1317 1317 alias = get_scm(repo_full_path)[0]
1318 log.debug('Creating instance of %s repository from %s'
1319 % (alias, repo_full_path))
1318 log.debug('Creating instance of %s repository from %s',
1319 alias, repo_full_path)
1320 1320 backend = get_backend(alias)
1321 1321 except VCSError:
1322 1322 log.error(traceback.format_exc())
@@ -1423,7 +1423,7 b' class RepoGroup(Base, BaseModel):'
1423 1423 break
1424 1424 if cnt == parents_recursion_limit:
1425 1425 # this will prevent accidental infinite loops
1426 log.error('group nested more than %s' %
1426 log.error('group nested more than %s',
1427 1427 parents_recursion_limit)
1428 1428 break
1429 1429
@@ -1953,8 +1953,8 b' class CacheInvalidation(Base, BaseModel)'
1953 1953
1954 1954 try:
1955 1955 for inv_obj in inv_objs:
1956 log.debug('marking %s key for invalidation based on repo_name=%s'
1957 % (inv_obj, safe_str(repo_name)))
1956 log.debug('marking %s key for invalidation based on repo_name=%s',
1957 inv_obj, safe_str(repo_name))
1958 1958 if delete:
1959 1959 Session().delete(inv_obj)
1960 1960 else:
@@ -583,7 +583,7 b' class User(Base, BaseModel):'
583 583 """Update user lastlogin"""
584 584 self.last_login = datetime.datetime.now()
585 585 Session().add(self)
586 log.debug('updated user %s lastlogin' % self.username)
586 log.debug('updated user %s lastlogin', self.username)
587 587
588 588 @classmethod
589 589 def get_first_admin(cls):
@@ -1241,15 +1241,15 b' class Repository(Base, BaseModel):'
1241 1241 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1242 1242 _default = datetime.datetime.fromtimestamp(0)
1243 1243 last_change = cs_cache.get('date') or _default
1244 log.debug('updated repo %s with new cs cache %s'
1245 % (self.repo_name, cs_cache))
1244 log.debug('updated repo %s with new cs cache %s',
1245 self.repo_name, cs_cache)
1246 1246 self.updated_on = last_change
1247 1247 self.changeset_cache = cs_cache
1248 1248 Session().add(self)
1249 1249 Session().commit()
1250 1250 else:
1251 log.debug('Skipping repo:%s already with latest changes'
1252 % self.repo_name)
1251 log.debug('Skipping repo:%s already with latest changes',
1252 self.repo_name)
1253 1253
1254 1254 @property
1255 1255 def tip(self):
@@ -1344,18 +1344,18 b' class Repository(Base, BaseModel):'
1344 1344
1345 1345 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1346 1346 if not valid:
1347 log.debug('Cache for %s invalidated, getting new object' % (rn))
1347 log.debug('Cache for %s invalidated, getting new object', rn)
1348 1348 region_invalidate(_c, None, rn)
1349 1349 else:
1350 log.debug('Getting obj for %s from cache' % (rn))
1350 log.debug('Getting obj for %s from cache', rn)
1351 1351 return _c(rn)
1352 1352
1353 1353 def __get_instance(self):
1354 1354 repo_full_path = self.repo_full_path
1355 1355 try:
1356 1356 alias = get_scm(repo_full_path)[0]
1357 log.debug('Creating instance of %s repository from %s'
1358 % (alias, repo_full_path))
1357 log.debug('Creating instance of %s repository from %s',
1358 alias, repo_full_path)
1359 1359 backend = get_backend(alias)
1360 1360 except VCSError:
1361 1361 log.error(traceback.format_exc())
@@ -1462,7 +1462,7 b' class RepoGroup(Base, BaseModel):'
1462 1462 break
1463 1463 if cnt == parents_recursion_limit:
1464 1464 # this will prevent accidental infinite loops
1465 log.error('group nested more than %s' %
1465 log.error('group nested more than %s',
1466 1466 parents_recursion_limit)
1467 1467 break
1468 1468
@@ -1992,8 +1992,8 b' class CacheInvalidation(Base, BaseModel)'
1992 1992
1993 1993 try:
1994 1994 for inv_obj in inv_objs:
1995 log.debug('marking %s key for invalidation based on repo_name=%s'
1996 % (inv_obj, safe_str(repo_name)))
1995 log.debug('marking %s key for invalidation based on repo_name=%s',
1996 inv_obj, safe_str(repo_name))
1997 1997 if delete:
1998 1998 Session().delete(inv_obj)
1999 1999 else:
@@ -610,7 +610,7 b' class User(Base, BaseModel):'
610 610 """Update user lastlogin"""
611 611 self.last_login = datetime.datetime.now()
612 612 Session().add(self)
613 log.debug('updated user %s lastlogin' % self.username)
613 log.debug('updated user %s lastlogin', self.username)
614 614
615 615 @classmethod
616 616 def get_first_admin(cls):
@@ -1287,15 +1287,15 b' class Repository(Base, BaseModel):'
1287 1287 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1288 1288 _default = datetime.datetime.fromtimestamp(0)
1289 1289 last_change = cs_cache.get('date') or _default
1290 log.debug('updated repo %s with new cs cache %s'
1291 % (self.repo_name, cs_cache))
1290 log.debug('updated repo %s with new cs cache %s',
1291 self.repo_name, cs_cache)
1292 1292 self.updated_on = last_change
1293 1293 self.changeset_cache = cs_cache
1294 1294 Session().add(self)
1295 1295 Session().commit()
1296 1296 else:
1297 log.debug('Skipping repo:%s already with latest changes'
1298 % self.repo_name)
1297 log.debug('Skipping repo:%s already with latest changes',
1298 self.repo_name)
1299 1299
1300 1300 @property
1301 1301 def tip(self):
@@ -1390,18 +1390,18 b' class Repository(Base, BaseModel):'
1390 1390
1391 1391 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1392 1392 if not valid:
1393 log.debug('Cache for %s invalidated, getting new object' % (rn))
1393 log.debug('Cache for %s invalidated, getting new object', rn)
1394 1394 region_invalidate(_c, None, rn)
1395 1395 else:
1396 log.debug('Getting obj for %s from cache' % (rn))
1396 log.debug('Getting obj for %s from cache', rn)
1397 1397 return _c(rn)
1398 1398
1399 1399 def __get_instance(self):
1400 1400 repo_full_path = self.repo_full_path
1401 1401 try:
1402 1402 alias = get_scm(repo_full_path)[0]
1403 log.debug('Creating instance of %s repository from %s'
1404 % (alias, repo_full_path))
1403 log.debug('Creating instance of %s repository from %s',
1404 alias, repo_full_path)
1405 1405 backend = get_backend(alias)
1406 1406 except VCSError:
1407 1407 log.error(traceback.format_exc())
@@ -1512,7 +1512,7 b' class RepoGroup(Base, BaseModel):'
1512 1512 break
1513 1513 if cnt == parents_recursion_limit:
1514 1514 # this will prevent accidental infinite loops
1515 log.error('group nested more than %s' %
1515 log.error('group nested more than %s',
1516 1516 parents_recursion_limit)
1517 1517 break
1518 1518
@@ -2042,11 +2042,11 b' class CacheInvalidation(Base, BaseModel)'
2042 2042 Mark all caches of a repo as invalid in the database.
2043 2043 """
2044 2044 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
2045 log.debug('for repo %s got %s invalidation objects' % (repo_name, inv_objs))
2045 log.debug('for repo %s got %s invalidation objects', repo_name, inv_objs)
2046 2046 try:
2047 2047 for inv_obj in inv_objs:
2048 log.debug('marking %s key for invalidation based on repo_name=%s'
2049 % (inv_obj, safe_str(repo_name)))
2048 log.debug('marking %s key for invalidation based on repo_name=%s',
2049 inv_obj, safe_str(repo_name))
2050 2050 if delete:
2051 2051 Session().delete(inv_obj)
2052 2052 else:
@@ -610,7 +610,7 b' class User(Base, BaseModel):'
610 610 """Update user lastlogin"""
611 611 self.last_login = datetime.datetime.now()
612 612 Session().add(self)
613 log.debug('updated user %s lastlogin' % self.username)
613 log.debug('updated user %s lastlogin', self.username)
614 614
615 615 @classmethod
616 616 def get_first_admin(cls):
@@ -1314,15 +1314,15 b' class Repository(Base, BaseModel):'
1314 1314 if (cs_cache != self.changeset_cache or not self.changeset_cache):
1315 1315 _default = datetime.datetime.fromtimestamp(0)
1316 1316 last_change = cs_cache.get('date') or _default
1317 log.debug('updated repo %s with new cs cache %s'
1318 % (self.repo_name, cs_cache))
1317 log.debug('updated repo %s with new cs cache %s',
1318 self.repo_name, cs_cache)
1319 1319 self.updated_on = last_change
1320 1320 self.changeset_cache = cs_cache
1321 1321 Session().add(self)
1322 1322 Session().commit()
1323 1323 else:
1324 log.debug('Skipping repo:%s already with latest changes'
1325 % self.repo_name)
1324 log.debug('Skipping repo:%s already with latest changes',
1325 self.repo_name)
1326 1326
1327 1327 @property
1328 1328 def tip(self):
@@ -1417,18 +1417,18 b' class Repository(Base, BaseModel):'
1417 1417
1418 1418 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1419 1419 if not valid:
1420 log.debug('Cache for %s invalidated, getting new object' % (rn))
1420 log.debug('Cache for %s invalidated, getting new object', rn)
1421 1421 region_invalidate(_c, None, rn)
1422 1422 else:
1423 log.debug('Getting obj for %s from cache' % (rn))
1423 log.debug('Getting obj for %s from cache', rn)
1424 1424 return _c(rn)
1425 1425
1426 1426 def __get_instance(self):
1427 1427 repo_full_path = self.repo_full_path
1428 1428 try:
1429 1429 alias = get_scm(repo_full_path)[0]
1430 log.debug('Creating instance of %s repository from %s'
1431 % (alias, repo_full_path))
1430 log.debug('Creating instance of %s repository from %s',
1431 alias, repo_full_path)
1432 1432 backend = get_backend(alias)
1433 1433 except VCSError:
1434 1434 log.error(traceback.format_exc())
@@ -1539,7 +1539,7 b' class RepoGroup(Base, BaseModel):'
1539 1539 break
1540 1540 if cnt == parents_recursion_limit:
1541 1541 # this will prevent accidental infinite loops
1542 log.error('group nested more than %s' %
1542 log.error('group nested more than %s',
1543 1543 parents_recursion_limit)
1544 1544 break
1545 1545
@@ -2087,12 +2087,12 b' class CacheInvalidation(Base, BaseModel)'
2087 2087 Mark all caches of a repo as invalid in the database.
2088 2088 """
2089 2089 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
2090 log.debug('for repo %s got %s invalidation objects'
2091 % (safe_str(repo_name), inv_objs))
2090 log.debug('for repo %s got %s invalidation objects',
2091 safe_str(repo_name), inv_objs)
2092 2092 try:
2093 2093 for inv_obj in inv_objs:
2094 log.debug('marking %s key for invalidation based on repo_name=%s'
2095 % (inv_obj, safe_str(repo_name)))
2094 log.debug('marking %s key for invalidation based on repo_name=%s',
2095 inv_obj, safe_str(repo_name))
2096 2096 if delete:
2097 2097 Session().delete(inv_obj)
2098 2098 else:
@@ -76,7 +76,7 b' class User(Base):'
76 76 self.last_login = datetime.datetime.now()
77 77 session.add(self)
78 78 session.commit()
79 log.debug('updated user %s lastlogin' % self.username)
79 log.debug('updated user %s lastlogin', self.username)
80 80 except (DatabaseError,):
81 81 session.rollback()
82 82
@@ -90,7 +90,7 b' def fixups(models, _SESSION):'
90 90 perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all()
91 91 defined_perms_groups = map(_get_group,
92 92 (x.permission.permission_name for x in perms))
93 log.debug('GOT ALREADY DEFINED:%s' % perms)
93 log.debug('GOT ALREADY DEFINED:%s', perms)
94 94 DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS
95 95
96 96 # for every default permission that needs to be created, we check if
@@ -98,8 +98,8 b' def fixups(models, _SESSION):'
98 98 for perm_name in DEFAULT_PERMS:
99 99 gr = _get_group(perm_name)
100 100 if gr not in defined_perms_groups:
101 log.debug('GR:%s not found, creating permission %s'
102 % (gr, perm_name))
101 log.debug('GR:%s not found, creating permission %s',
102 gr, perm_name)
103 103 new_perm = _make_perm(perm_name)
104 104 _SESSION().add(new_perm)
105 105 _SESSION().commit()
@@ -115,7 +115,7 b' def fixups(models, _SESSION):'
115 115 ('default_repo_type', 'hg')]:
116 116
117 117 if skip_existing and models.Setting.get_by_name(k) is not None:
118 log.debug('Skipping option %s' % k)
118 log.debug('Skipping option %s', k)
119 119 continue
120 120 setting = models.Setting(k, v)
121 121 _SESSION().add(setting)
@@ -93,7 +93,7 b' def fixups(models, _SESSION):'
93 93 perms = models.UserToPerm.query().filter(models.UserToPerm.user == user).all()
94 94 defined_perms_groups = map(_get_group,
95 95 (x.permission.permission_name for x in perms))
96 log.debug('GOT ALREADY DEFINED:%s' % perms)
96 log.debug('GOT ALREADY DEFINED:%s', perms)
97 97 DEFAULT_PERMS = models.Permission.DEFAULT_USER_PERMISSIONS
98 98
99 99 # for every default permission that needs to be created, we check if
@@ -101,8 +101,8 b' def fixups(models, _SESSION):'
101 101 for perm_name in DEFAULT_PERMS:
102 102 gr = _get_group(perm_name)
103 103 if gr not in defined_perms_groups:
104 log.debug('GR:%s not found, creating permission %s'
105 % (gr, perm_name))
104 log.debug('GR:%s not found, creating permission %s',
105 gr, perm_name)
106 106 new_perm = _make_perm(perm_name)
107 107 _SESSION().add(new_perm)
108 108 _SESSION().commit()
@@ -646,7 +646,7 b' def action_parser(user_log, feed=False, '
646 646 _rev = repo.get_changeset(rev)
647 647 revs.append(_rev)
648 648 except ChangesetDoesNotExistError:
649 log.error('cannot find revision %s in this repo' % rev)
649 log.error('cannot find revision %s in this repo', rev)
650 650 revs.append(rev)
651 651 else:
652 652 _rev = AttributeDict({
@@ -1324,17 +1324,17 b' def urlify_issues(newtext, repository, l'
1324 1324 ]
1325 1325
1326 1326 if valid_indices:
1327 log.debug('found issue server suffixes `%s` during valuation of: %s'
1328 % (','.join(valid_indices), newtext))
1327 log.debug('found issue server suffixes `%s` during valuation of: %s',
1328 ','.join(valid_indices), newtext)
1329 1329
1330 1330 for pattern_index in valid_indices:
1331 1331 ISSUE_PATTERN = conf.get('issue_pat%s' % pattern_index)
1332 1332 ISSUE_SERVER_LNK = conf.get('issue_server_link%s' % pattern_index)
1333 1333 ISSUE_PREFIX = conf.get('issue_prefix%s' % pattern_index)
1334 1334
1335 log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s'
1336 % (pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
1337 ISSUE_PREFIX))
1335 log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s',
1336 pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
1337 ISSUE_PREFIX)
1338 1338
1339 1339 URL_PAT = re.compile(ISSUE_PATTERN)
1340 1340
@@ -1363,7 +1363,7 b' def urlify_issues(newtext, repository, l'
1363 1363 'serv': ISSUE_SERVER_LNK,
1364 1364 }
1365 1365 newtext = URL_PAT.sub(url_func, newtext)
1366 log.debug('processed prefix:`%s` => %s' % (pattern_index, newtext))
1366 log.debug('processed prefix:`%s` => %s', pattern_index, newtext)
1367 1367
1368 1368 # if we actually did something above
1369 1369 if link_:
@@ -138,7 +138,7 b' class WhooshResultWrapper(object):'
138 138
139 139 def get_full_content(self, docid):
140 140 res = self.searcher.stored_fields(docid[0])
141 log.debug('result: %s' % res)
141 log.debug('result: %s', res)
142 142 if self.search_type == 'content':
143 143 full_repo_path = jn(self.repo_location, res['repository'])
144 144 f_path = res['path'].split(full_repo_path)[-1]
@@ -156,7 +156,7 b' class WhooshResultWrapper(object):'
156 156 elif self.search_type == 'message':
157 157 res.update({'message_hl': self.highlight(res['message'])})
158 158
159 log.debug('result: %s' % res)
159 log.debug('result: %s', res)
160 160
161 161 return res
162 162
@@ -182,11 +182,11 b' class WhooshIndexingDaemon(object):'
182 182 'replacing with empty content' % path)
183 183 u_content = u''
184 184 else:
185 log.debug(' >> %s [WITH CONTENT]' % path)
185 log.debug(' >> %s [WITH CONTENT]', path)
186 186 indexed_w_content += 1
187 187
188 188 else:
189 log.debug(' >> %s' % path)
189 log.debug(' >> %s', path)
190 190 # just index file name without it's content
191 191 u_content = u''
192 192 indexed += 1
@@ -221,14 +221,14 b' class WhooshIndexingDaemon(object):'
221 221 if start_rev is None:
222 222 start_rev = repo[0].raw_id
223 223
224 log.debug('indexing changesets in %s starting at rev: %s' %
225 (repo_name, start_rev))
224 log.debug('indexing changesets in %s starting at rev: %s',
225 repo_name, start_rev)
226 226
227 227 indexed = 0
228 228 cs_iter = repo.get_changesets(start=start_rev)
229 229 total = len(cs_iter)
230 230 for cs in cs_iter:
231 log.debug(' >> %s/%s' % (cs, total))
231 log.debug(' >> %s/%s', cs, total)
232 232 writer.add_document(
233 233 raw_id=unicode(cs.raw_id),
234 234 owner=unicode(repo.contact),
@@ -244,7 +244,7 b' class WhooshIndexingDaemon(object):'
244 244 )
245 245 indexed += 1
246 246
247 log.debug('indexed %d changesets for repo %s' % (indexed, repo_name))
247 log.debug('indexed %d changesets for repo %s', indexed, repo_name)
248 248 return indexed
249 249
250 250 def index_files(self, file_idx_writer, repo_name, repo):
@@ -256,16 +256,16 b' class WhooshIndexingDaemon(object):'
256 256 :param repo: instance of vcs repo
257 257 """
258 258 i_cnt = iwc_cnt = 0
259 log.debug('building index for %s @revision:%s' % (repo.path,
260 self._get_index_revision(repo)))
259 log.debug('building index for %s @revision:%s', repo.path,
260 self._get_index_revision(repo))
261 261 index_rev = self._get_index_revision(repo)
262 262 for idx_path in self.get_paths(repo):
263 263 i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
264 264 i_cnt += i
265 265 iwc_cnt += iwc
266 266
267 log.debug('added %s files %s with content for repo %s' %
268 (i_cnt + iwc_cnt, iwc_cnt, repo.path))
267 log.debug('added %s files %s with content for repo %s',
268 i_cnt + iwc_cnt, iwc_cnt, repo.path)
269 269 return i_cnt, iwc_cnt
270 270
271 271 def update_changeset_index(self):
@@ -311,8 +311,8 b' class WhooshIndexingDaemon(object):'
311 311 indexed_total += self.index_changesets(writer,
312 312 repo_name, repo, start_id)
313 313 writer_is_dirty = True
314 log.debug('indexed %s changesets for repo %s' % (
315 indexed_total, repo_name)
314 log.debug('indexed %s changesets for repo %s',
315 indexed_total, repo_name
316 316 )
317 317 finally:
318 318 if writer_is_dirty:
@@ -357,8 +357,8 b' class WhooshIndexingDaemon(object):'
357 357 # The file has changed, delete it and add it to
358 358 # the list of files to reindex
359 359 log.debug(
360 'adding to reindex list %s mtime: %s vs %s' % (
361 indexed_path, mtime, indexed_time)
360 'adding to reindex list %s mtime: %s vs %s',
361 indexed_path, mtime, indexed_time
362 362 )
363 363 writer.delete_by_term('fileid', indexed_path)
364 364 writer_is_dirty = True
@@ -366,7 +366,7 b' class WhooshIndexingDaemon(object):'
366 366 to_index.add(indexed_path)
367 367 except (ChangesetError, NodeDoesNotExistError):
368 368 # This file was deleted since it was indexed
369 log.debug('removing from index %s' % indexed_path)
369 log.debug('removing from index %s', indexed_path)
370 370 writer.delete_by_term('path', indexed_path)
371 371 writer_is_dirty = True
372 372
@@ -389,16 +389,16 b' class WhooshIndexingDaemon(object):'
389 389 # that wasn't indexed before. So index it!
390 390 i, iwc = self.add_doc(writer, path, repo, repo_name)
391 391 writer_is_dirty = True
392 log.debug('re indexing %s' % path)
392 log.debug('re indexing %s', path)
393 393 ri_cnt += i
394 394 ri_cnt_total += 1
395 395 riwc_cnt += iwc
396 396 riwc_cnt_total += iwc
397 log.debug('added %s files %s with content for repo %s' % (
398 ri_cnt + riwc_cnt, riwc_cnt, repo.path)
397 log.debug('added %s files %s with content for repo %s',
398 ri_cnt + riwc_cnt, riwc_cnt, repo.path
399 399 )
400 log.debug('indexed %s files in total and %s with content' % (
401 ri_cnt_total, riwc_cnt_total)
400 log.debug('indexed %s files in total and %s with content',
401 ri_cnt_total, riwc_cnt_total
402 402 )
403 403 finally:
404 404 if writer_is_dirty:
@@ -69,7 +69,7 b' class GitRepository(object):'
69 69
70 70 git_command = request.GET.get('service')
71 71 if git_command not in self.commands:
72 log.debug('command %s not allowed' % git_command)
72 log.debug('command %s not allowed', git_command)
73 73 return exc.HTTPMethodNotAllowed()
74 74
75 75 # note to self:
@@ -108,7 +108,7 b' class GitRepository(object):'
108 108 _git_path = kallithea.CONFIG.get('git_path', 'git')
109 109 git_command = self._get_fixedpath(request.path_info)
110 110 if git_command not in self.commands:
111 log.debug('command %s not allowed' % git_command)
111 log.debug('command %s not allowed', git_command)
112 112 return exc.HTTPMethodNotAllowed()
113 113
114 114 if 'CONTENT_LENGTH' in environ:
@@ -55,8 +55,8 b" GIT_PROTO_PAT = re.compile(r'^/(.+)/(inf"
55 55 def is_git(environ):
56 56 path_info = environ['PATH_INFO']
57 57 isgit_path = GIT_PROTO_PAT.match(path_info)
58 log.debug('pathinfo: %s detected as Git %s' % (
59 path_info, isgit_path is not None)
58 log.debug('pathinfo: %s detected as Git %s',
59 path_info, isgit_path is not None
60 60 )
61 61 return isgit_path
62 62
@@ -80,7 +80,7 b' class SimpleGit(BaseVCSController):'
80 80 #======================================================================
81 81 try:
82 82 repo_name = self.__get_repository(environ)
83 log.debug('Extracted repo name is %s' % repo_name)
83 log.debug('Extracted repo name is %s', repo_name)
84 84 except Exception:
85 85 return HTTPInternalServerError()(environ, start_response)
86 86
@@ -126,7 +126,7 b' class SimpleGit(BaseVCSController):'
126 126 pre_auth = auth_modules.authenticate('', '', environ)
127 127 if pre_auth is not None and pre_auth.get('username'):
128 128 username = pre_auth['username']
129 log.debug('PRE-AUTH got %s as username' % username)
129 log.debug('PRE-AUTH got %s as username', username)
130 130
131 131 # If not authenticated by the container, running basic auth
132 132 if not username:
@@ -178,7 +178,7 b' class SimpleGit(BaseVCSController):'
178 178 #===================================================================
179 179 str_repo_name = safe_str(repo_name)
180 180 repo_path = os.path.join(safe_str(self.basepath),str_repo_name)
181 log.debug('Repository path is %s' % repo_path)
181 log.debug('Repository path is %s', repo_path)
182 182
183 183 # CHECK LOCKING only if it's not ANONYMOUS USER
184 184 if username != User.DEFAULT_USER:
@@ -194,19 +194,19 b' class SimpleGit(BaseVCSController):'
194 194 'locked_by': locked_by})
195 195
196 196 fix_PATH()
197 log.debug('HOOKS extras is %s' % extras)
197 log.debug('HOOKS extras is %s', extras)
198 198 baseui = make_ui('db')
199 199 self.__inject_extras(repo_path, baseui, extras)
200 200
201 201 try:
202 202 self._handle_githooks(repo_name, action, baseui, environ)
203 log.info('%s action on Git repo "%s" by "%s" from %s' %
204 (action, str_repo_name, safe_str(username), ip_addr))
203 log.info('%s action on Git repo "%s" by "%s" from %s',
204 action, str_repo_name, safe_str(username), ip_addr)
205 205 app = self.__make_app(repo_name, repo_path, extras)
206 206 return app(environ, start_response)
207 207 except HTTPLockedRC as e:
208 208 _code = CONFIG.get('lock_ret_code')
209 log.debug('Repository LOCKED ret code %s!' % (_code))
209 log.debug('Repository LOCKED ret code %s!', _code)
210 210 return e(environ, start_response)
211 211 except Exception:
212 212 log.error(traceback.format_exc())
@@ -60,8 +60,8 b' def is_mercurial(environ):'
60 60 else:
61 61 ishg_path = False
62 62
63 log.debug('pathinfo: %s detected as Mercurial %s' % (
64 path_info, ishg_path)
63 log.debug('pathinfo: %s detected as Mercurial %s',
64 path_info, ishg_path
65 65 )
66 66 return ishg_path
67 67
@@ -84,7 +84,7 b' class SimpleHg(BaseVCSController):'
84 84 #======================================================================
85 85 try:
86 86 repo_name = environ['REPO_NAME'] = self.__get_repository(environ)
87 log.debug('Extracted repo name is %s' % repo_name)
87 log.debug('Extracted repo name is %s', repo_name)
88 88 except Exception:
89 89 return HTTPInternalServerError()(environ, start_response)
90 90
@@ -130,7 +130,7 b' class SimpleHg(BaseVCSController):'
130 130 pre_auth = auth_modules.authenticate('', '', environ)
131 131 if pre_auth is not None and pre_auth.get('username'):
132 132 username = pre_auth['username']
133 log.debug('PRE-AUTH got %s as username' % username)
133 log.debug('PRE-AUTH got %s as username', username)
134 134
135 135 # If not authenticated by the container, running basic auth
136 136 if not username:
@@ -181,7 +181,7 b' class SimpleHg(BaseVCSController):'
181 181 #======================================================================
182 182 str_repo_name = safe_str(repo_name)
183 183 repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
184 log.debug('Repository path is %s' % repo_path)
184 log.debug('Repository path is %s', repo_path)
185 185
186 186 # CHECK LOCKING only if it's not ANONYMOUS USER
187 187 if username != User.DEFAULT_USER:
@@ -197,13 +197,13 b' class SimpleHg(BaseVCSController):'
197 197 'locked_by': locked_by})
198 198
199 199 fix_PATH()
200 log.debug('HOOKS extras is %s' % extras)
200 log.debug('HOOKS extras is %s', extras)
201 201 baseui = make_ui('db')
202 202 self.__inject_extras(repo_path, baseui, extras)
203 203
204 204 try:
205 log.info('%s action on Mercurial repo "%s" by "%s" from %s' %
206 (action, str_repo_name, safe_str(username), ip_addr))
205 log.info('%s action on Mercurial repo "%s" by "%s" from %s',
206 action, str_repo_name, safe_str(username), ip_addr)
207 207 app = self.__make_app(repo_path, baseui, extras)
208 208 return app(environ, start_response)
209 209 except RepoError as e:
@@ -211,7 +211,7 b' class SimpleHg(BaseVCSController):'
211 211 return HTTPNotFound()(environ, start_response)
212 212 except HTTPLockedRC as e:
213 213 _code = CONFIG.get('lock_ret_code')
214 log.debug('Repository LOCKED ret code %s!' % (_code))
214 log.debug('Repository LOCKED ret code %s!', _code)
215 215 return e(environ, start_response)
216 216 except Exception:
217 217 log.error(traceback.format_exc())
@@ -62,7 +62,7 b' class Command(BasePasterCommand):'
62 62 remove_obsolete=rm_obsolete)
63 63 added = ', '.join(added) or '-'
64 64 removed = ', '.join(removed) or '-'
65 log.info('Scan completed added: %s removed: %s' % (added, removed))
65 log.info('Scan completed added: %s removed: %s', added, removed)
66 66
67 67 def update_parser(self):
68 68 self.parser.add_option(
@@ -202,8 +202,8 b' def action_logger(user, action, repo, ip'
202 202 user_log.user_ip = ipaddr
203 203 sa.add(user_log)
204 204
205 log.info('Logging action:%s on %s by user:%s ip:%s' %
206 (action, safe_unicode(repo), user_obj, ipaddr))
205 log.info('Logging action:%s on %s by user:%s ip:%s',
206 action, safe_unicode(repo), user_obj, ipaddr)
207 207 if commit:
208 208 sa.commit()
209 209
@@ -218,14 +218,14 b' def get_filesystem_repos(path, recursive'
218 218
219 219 # remove ending slash for better results
220 220 path = path.rstrip(os.sep)
221 log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
221 log.debug('now scanning in %s location recursive:%s...', path, recursive)
222 222
223 223 def _get_repos(p):
224 224 if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
225 log.warning('ignoring repo path without access: %s' % (p,))
225 log.warning('ignoring repo path without access: %s', p)
226 226 return
227 227 if not os.access(p, os.W_OK):
228 log.warning('repo path without write access: %s' % (p,))
228 log.warning('repo path without write access: %s', p)
229 229 for dirpath in os.listdir(p):
230 230 if os.path.isfile(os.path.join(p, dirpath)):
231 231 continue
@@ -350,14 +350,14 b" def make_ui(read_from='file', path=None,"
350 350
351 351 if read_from == 'file':
352 352 if not os.path.isfile(path):
353 log.debug('hgrc file is not present at %s, skipping...' % path)
353 log.debug('hgrc file is not present at %s, skipping...', path)
354 354 return False
355 log.debug('reading hgrc from %s' % path)
355 log.debug('reading hgrc from %s', path)
356 356 cfg = config.config()
357 357 cfg.read(path)
358 358 for section in ui_sections:
359 359 for k, v in cfg.items(section):
360 log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
360 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
361 361 baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
362 362
363 363 elif read_from == 'db':
@@ -448,8 +448,8 b' def map_groups(path):'
448 448 break
449 449
450 450 if group is None:
451 log.debug('creating group level: %s group_name: %s'
452 % (lvl, group_name))
451 log.debug('creating group level: %s group_name: %s',
452 lvl, group_name)
453 453 group = RepoGroup(group_name, parent)
454 454 group.group_description = desc
455 455 group.user = owner
@@ -495,7 +495,7 b' def repo2db_mapper(initial_repo_list, re'
495 495 db_repo = repo_model.get_by_repo_name(unicode_name)
496 496 # found repo that is on filesystem not in Kallithea database
497 497 if not db_repo:
498 log.info('repository %s not found, creating now' % name)
498 log.info('repository %s not found, creating now', name)
499 499 added.append(name)
500 500 desc = (repo.description
501 501 if repo.description != 'unknown'
@@ -532,7 +532,7 b' def repo2db_mapper(initial_repo_list, re'
532 532 for repo in sa.query(Repository).all():
533 533 if repo.repo_name not in initial_repo_list.keys():
534 534 if remove_obsolete:
535 log.debug("Removing non-existing repository found in db `%s`" %
535 log.debug("Removing non-existing repository found in db `%s`",
536 536 repo.repo_name)
537 537 try:
538 538 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
@@ -581,7 +581,7 b' def load_rcextensions(root_path):'
581 581 if os.path.isfile(path):
582 582 rcext = create_module('rc', path)
583 583 EXT = kallithea.EXTENSIONS = rcext
584 log.debug('Found rcextensions now loading %s...' % rcext)
584 log.debug('Found rcextensions now loading %s...', rcext)
585 585
586 586 # Additional mappings that are not present in the pygments lexers
587 587 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
@@ -657,11 +657,11 b' def create_test_env(repos_test_path, con'
657 657
658 658 # PART ONE create db
659 659 dbconf = config['sqlalchemy.db1.url']
660 log.debug('making test db %s' % dbconf)
660 log.debug('making test db %s', dbconf)
661 661
662 662 # create test dir if it doesn't exist
663 663 if not os.path.isdir(repos_test_path):
664 log.debug('Creating testdir %s' % repos_test_path)
664 log.debug('Creating testdir %s', repos_test_path)
665 665 os.makedirs(repos_test_path)
666 666
667 667 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
@@ -682,11 +682,11 b' def create_test_env(repos_test_path, con'
682 682
683 683 #clean index and data
684 684 if idx_path and os.path.exists(idx_path):
685 log.debug('remove %s' % idx_path)
685 log.debug('remove %s', idx_path)
686 686 shutil.rmtree(idx_path)
687 687
688 688 if data_path and os.path.exists(data_path):
689 log.debug('remove %s' % data_path)
689 log.debug('remove %s', data_path)
690 690 shutil.rmtree(data_path)
691 691
692 692 #CREATE DEFAULT TEST REPOS
@@ -810,10 +810,10 b' def check_git_version():'
810 810
811 811 req_ver = StrictVersion('1.7.4')
812 812
813 log.debug('Git executable: "%s" version %s detected: %s'
814 % (settings.GIT_EXECUTABLE_PATH, ver, stdout))
813 log.debug('Git executable: "%s" version %s detected: %s',
814 settings.GIT_EXECUTABLE_PATH, ver, stdout)
815 815 if stderr:
816 log.warning('Error detecting git version: %r' % stderr)
816 log.warning('Error detecting git version: %r', stderr)
817 817 elif ver < req_ver:
818 818 log.warning('Kallithea detected git version %s, which is too old '
819 819 'for the system to function properly. '
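The deferred form is not limited to `%s`; `%r`, `%d` and the other `%` conversions appearing in these hunks behave the same way, because logging applies the ordinary `%` operator to the stored arguments when the record is emitted. A small sketch with placeholder values, not drawn from the changeset:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('demo')

    stderr_text = 'fatal: not a git repository'  # placeholder value
    indexed = 42

    # %r and %d interpolate exactly as they would with the '%' operator
    log.warning('Error detecting git version: %r', stderr_text)
    log.debug('indexed %d changesets for repo %s', indexed, 'demo/repo')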
@@ -60,7 +60,7 b' def init_model(engine):'
60 60 :param engine: engine to bind to
61 61 """
62 62 engine_str = obfuscate_url_pw(str(engine.url))
63 log.info("initializing db for %s" % engine_str)
63 log.info("initializing db for %s", engine_str)
64 64 meta.Base.metadata.bind = engine
65 65
66 66
@@ -635,7 +635,7 b' class User(Base, BaseModel):'
635 635 """Update user lastlogin"""
636 636 self.last_login = datetime.datetime.now()
637 637 Session().add(self)
638 log.debug('updated user %s lastlogin' % self.username)
638 log.debug('updated user %s lastlogin', self.username)
639 639
640 640 @classmethod
641 641 def get_first_admin(cls):
@@ -1352,15 +1352,15 b' class Repository(Base, BaseModel):'
1352 1352 if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
1353 1353 _default = datetime.datetime.fromtimestamp(0)
1354 1354 last_change = cs_cache.get('date') or _default
1355 log.debug('updated repo %s with new cs cache %s'
1356 % (self.repo_name, cs_cache))
1355 log.debug('updated repo %s with new cs cache %s',
1356 self.repo_name, cs_cache)
1357 1357 self.updated_on = last_change
1358 1358 self.changeset_cache = cs_cache
1359 1359 Session().add(self)
1360 1360 Session().commit()
1361 1361 else:
1362 log.debug('changeset_cache for %s already up to date with %s'
1363 % (self.repo_name, cs_cache['raw_id']))
1362 log.debug('changeset_cache for %s already up to date with %s',
1363 self.repo_name, cs_cache['raw_id'])
1364 1364
1365 1365 @property
1366 1366 def tip(self):
@@ -1451,18 +1451,18 b' class Repository(Base, BaseModel):'
1451 1451
1452 1452 valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
1453 1453 if not valid:
1454 log.debug('Cache for %s invalidated, getting new object' % (rn))
1454 log.debug('Cache for %s invalidated, getting new object', rn)
1455 1455 region_invalidate(_c, None, rn)
1456 1456 else:
1457 log.debug('Getting scm_instance of %s from cache' % (rn))
1457 log.debug('Getting scm_instance of %s from cache', rn)
1458 1458 return _c(rn)
1459 1459
1460 1460 def __get_instance(self):
1461 1461 repo_full_path = self.repo_full_path
1462 1462
1463 1463 alias = get_scm(repo_full_path)[0]
1464 log.debug('Creating instance of %s repository from %s'
1465 % (alias, repo_full_path))
1464 log.debug('Creating instance of %s repository from %s',
1465 alias, repo_full_path)
1466 1466 backend = get_backend(alias)
1467 1467
1468 1468 if alias == 'hg':
@@ -2111,12 +2111,12 b' class CacheInvalidation(Base, BaseModel)'
2111 2111 Mark all caches of a repo as invalid in the database.
2112 2112 """
2113 2113 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
2114 log.debug('for repo %s got %s invalidation objects'
2115 % (safe_str(repo_name), inv_objs))
2114 log.debug('for repo %s got %s invalidation objects',
2115 safe_str(repo_name), inv_objs)
2116 2116
2117 2117 for inv_obj in inv_objs:
2118 log.debug('marking %s key for invalidation based on repo_name=%s'
2119 % (inv_obj, safe_str(repo_name)))
2118 log.debug('marking %s key for invalidation based on repo_name=%s',
2119 inv_obj, safe_str(repo_name))
2120 2120 if delete:
2121 2121 Session().delete(inv_obj)
2122 2122 else:
@@ -65,7 +65,7 b' class GistModel(BaseModel):'
65 65 """
66 66 root_path = RepoModel().repos_path
67 67 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
68 log.info("Removing %s" % (rm_path,))
68 log.info("Removing %s", rm_path)
69 69 shutil.rmtree(rm_path)
70 70
71 71 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
@@ -113,9 +113,9 b' class GistModel(BaseModel):'
113 113 gist_id = safe_unicode(unique_id(20))
114 114 lifetime = safe_int(lifetime, -1)
115 115 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
116 log.debug('set GIST expiration date to: %s'
117 % (time_to_datetime(gist_expires)
118 if gist_expires != -1 else 'forever'))
116 log.debug('set GIST expiration date to: %s',
117 time_to_datetime(gist_expires)
118 if gist_expires != -1 else 'forever')
119 119 #create the Database version
120 120 gist = Gist()
121 121 gist.gist_description = description
@@ -132,7 +132,7 b' class GistModel(BaseModel):'
132 132 self.sa.add(gist)
133 133
134 134 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
135 log.debug('Creating new %s GIST repo in %s' % (gist_type, gist_repo_path))
135 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
136 136 repo = RepoModel()._create_filesystem_repo(
137 137 repo_name=gist_id, repo_type='hg', repo_group=GIST_STORE_LOC)
138 138
@@ -91,14 +91,14 b' class NotificationModel(BaseModel):'
91 91 # TODO: inform user that requested operation couldn't be completed
92 92 log.error('cannot email unknown user %r', u)
93 93 recipients_objs = set(recipients_objs)
94 log.debug('sending notifications %s to %s' % (
95 type_, recipients_objs)
94 log.debug('sending notifications %s to %s',
95 type_, recipients_objs
96 96 )
97 97 elif recipients is None:
98 98 # empty recipients means to all admins
99 99 recipients_objs = User.query().filter(User.admin == True).all()
100 log.debug('sending notifications %s to admins: %s' % (
101 type_, recipients_objs)
100 log.debug('sending notifications %s to admins: %s',
101 type_, recipients_objs
102 102 )
103 103 #else: silently skip notification mails?
104 104
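One way to see what the conversion buys: with the logger above the DEBUG level, the eager `%` form still renders its arguments, while the argument-passing form never touches them. A self-contained check, written only to illustrate the behaviour (none of these names come from the changeset):

    import logging

    class Probe(object):
        # records whether the value was ever formatted
        def __init__(self):
            self.rendered = False
        def __str__(self):
            self.rendered = True
            return 'probe'

    logging.basicConfig(level=logging.INFO)  # DEBUG records are dropped
    log = logging.getLogger('demo')

    eager, lazy = Probe(), Probe()
    log.debug('sending notifications to %s' % eager)  # builds the string anyway
    log.debug('sending notifications to %s', lazy)     # skipped before formatting

    assert eager.rendered is True   # '%' ran even though nothing was logged
    assert lazy.rendered is False   # deferred argument was never formatted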
@@ -338,5 +338,5 b' class EmailNotificationModel(BaseModel):'
338 338 'h': h,
339 339 'c': c}
340 340 _kwargs.update(kwargs)
341 log.debug('rendering tmpl %s with kwargs %s' % (base, _kwargs))
341 log.debug('rendering tmpl %s with kwargs %s', base, _kwargs)
342 342 return email_template.render(**_kwargs)
@@ -79,7 +79,7 b' class PermissionModel(BaseModel):'
79 79 perms = UserToPerm.query().filter(UserToPerm.user == user).all()
80 80 defined_perms_groups = map(_get_group,
81 81 (x.permission.permission_name for x in perms))
82 log.debug('GOT ALREADY DEFINED:%s' % perms)
82 log.debug('GOT ALREADY DEFINED:%s', perms)
83 83 DEFAULT_PERMS = Permission.DEFAULT_USER_PERMISSIONS
84 84
85 85 if force:
@@ -92,8 +92,8 b' class PermissionModel(BaseModel):'
92 92 for perm_name in DEFAULT_PERMS:
93 93 gr = _get_group(perm_name)
94 94 if gr not in defined_perms_groups:
95 log.debug('GR:%s not found, creating permission %s'
96 % (gr, perm_name))
95 log.debug('GR:%s not found, creating permission %s',
96 gr, perm_name)
97 97 new_perm = _make_perm(perm_name)
98 98 self.sa.add(new_perm)
99 99
@@ -108,7 +108,7 b' class PermissionModel(BaseModel):'
108 108
109 109 # stage 2 reset defaults and set them from form data
110 110 def _make_new(usr, perm_name):
111 log.debug('Creating new permission:%s' % (perm_name))
111 log.debug('Creating new permission:%s', perm_name)
112 112 new = UserToPerm()
113 113 new.user = usr
114 114 new.permission = Permission.get_by_key(perm_name)
@@ -179,7 +179,7 b' class PullRequestModel(BaseModel):'
179 179 mention_recipients.difference_update(User.get_by_username(username, case_insensitive=True)
180 180 for username in extract_mentioned_users(old_description))
181 181
182 log.debug("Mentioning %s" % mention_recipients)
182 log.debug("Mentioning %s", mention_recipients)
183 183 self.__add_reviewers(pr, [], mention_recipients)
184 184
185 185 def update_reviewers(self, pull_request, reviewers_ids):
@@ -194,10 +194,10 b' class PullRequestModel(BaseModel):'
194 194 to_add = reviewers_ids.difference(current_reviewers_ids)
195 195 to_remove = current_reviewers_ids.difference(reviewers_ids)
196 196
197 log.debug("Adding %s reviewers" % to_add)
197 log.debug("Adding %s reviewers", to_add)
198 198 self.__add_reviewers(pull_request, to_add)
199 199
200 log.debug("Removing %s reviewers" % to_remove)
200 log.debug("Removing %s reviewers", to_remove)
201 201 for uid in to_remove:
202 202 reviewer = PullRequestReviewers.query()\
203 203 .filter(PullRequestReviewers.user_id==uid,
@@ -324,7 +324,7 b' class RepoModel(BaseModel):'
324 324
325 325 if 'repo_group' in kwargs:
326 326 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
327 log.debug('Updating repo %s with params:%s' % (cur_repo, kwargs))
327 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
328 328 for k in ['repo_enable_downloads',
329 329 'repo_description',
330 330 'repo_enable_locking',
@@ -584,7 +584,7 b' class RepoModel(BaseModel):'
584 584 obj.user = user
585 585 obj.permission = permission
586 586 self.sa.add(obj)
587 log.debug('Granted perm %s to %s on %s' % (perm, user, repo))
587 log.debug('Granted perm %s to %s on %s', perm, user, repo)
588 588 return obj
589 589
590 590 def revoke_user_permission(self, repo, user):
@@ -604,7 +604,7 b' class RepoModel(BaseModel):'
604 604 .scalar()
605 605 if obj is not None:
606 606 self.sa.delete(obj)
607 log.debug('Revoked perm on %s on %s' % (repo, user))
607 log.debug('Revoked perm on %s on %s', repo, user)
608 608
609 609 def grant_user_group_permission(self, repo, group_name, perm):
610 610 """
@@ -634,7 +634,7 b' class RepoModel(BaseModel):'
634 634 obj.users_group = group_name
635 635 obj.permission = permission
636 636 self.sa.add(obj)
637 log.debug('Granted perm %s to %s on %s' % (perm, group_name, repo))
637 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
638 638 return obj
639 639
640 640 def revoke_user_group_permission(self, repo, group_name):
@@ -654,7 +654,7 b' class RepoModel(BaseModel):'
654 654 .scalar()
655 655 if obj is not None:
656 656 self.sa.delete(obj)
657 log.debug('Revoked perm to %s on %s' % (repo, group_name))
657 log.debug('Revoked perm to %s on %s', repo, group_name)
658 658
659 659 def delete_stats(self, repo_name):
660 660 """
@@ -710,9 +710,9 b' class RepoModel(BaseModel):'
710 710 if is_valid_repo_group(repo_path, self.repos_path):
711 711 raise Exception('This path %s is a valid group' % repo_path)
712 712
713 log.info('creating repo %s in %s from url: `%s`' % (
713 log.info('creating repo %s in %s from url: `%s`',
714 714 repo_name, safe_unicode(repo_path),
715 obfuscate_url_pw(clone_uri)))
715 obfuscate_url_pw(clone_uri))
716 716
717 717 backend = get_backend(repo_type)
718 718
@@ -731,8 +731,8 b' class RepoModel(BaseModel):'
731 731 else:
732 732 raise Exception('Not supported repo_type %s expected hg/git' % repo_type)
733 733
734 log.debug('Created repo %s with %s backend'
735 % (safe_unicode(repo_name), safe_unicode(repo_type)))
734 log.debug('Created repo %s with %s backend',
735 safe_unicode(repo_name), safe_unicode(repo_type))
736 736 return repo
737 737
738 738 def _rename_filesystem_repo(self, old, new):
@@ -742,7 +742,7 b' class RepoModel(BaseModel):'
742 742 :param old: old name
743 743 :param new: new name
744 744 """
745 log.info('renaming repo from %s to %s' % (old, new))
745 log.info('renaming repo from %s to %s', old, new)
746 746
747 747 old_path = os.path.join(self.repos_path, old)
748 748 new_path = os.path.join(self.repos_path, new)
@@ -761,7 +761,7 b' class RepoModel(BaseModel):'
761 761 :param repo: repo object
762 762 """
763 763 rm_path = os.path.join(self.repos_path, repo.repo_name)
764 log.info("Removing %s" % (rm_path))
764 log.info("Removing %s", rm_path)
765 765
766 766 _now = datetime.now()
767 767 _ms = str(_now.microsecond).rjust(6, '0')
@@ -87,13 +87,13 b' class RepoGroupModel(BaseModel):'
87 87 """
88 88
89 89 create_path = os.path.join(self.repos_path, group_name)
90 log.debug('creating new group in %s' % create_path)
90 log.debug('creating new group in %s', create_path)
91 91
92 92 if os.path.isdir(create_path):
93 93 raise Exception('That directory already exists !')
94 94
95 95 os.makedirs(create_path)
96 log.debug('Created group in %s' % create_path)
96 log.debug('Created group in %s', create_path)
97 97
98 98 def _rename_group(self, old, new):
99 99 """
@@ -106,12 +106,12 b' class RepoGroupModel(BaseModel):'
106 106 log.debug('skipping group rename')
107 107 return
108 108
109 log.debug('renaming repository group from %s to %s' % (old, new))
109 log.debug('renaming repository group from %s to %s', old, new)
110 110
111 111 old_path = os.path.join(self.repos_path, old)
112 112 new_path = os.path.join(self.repos_path, new)
113 113
114 log.debug('renaming repos paths from %s to %s' % (old_path, new_path))
114 log.debug('renaming repos paths from %s to %s', old_path, new_path)
115 115
116 116 if os.path.isdir(new_path):
117 117 raise Exception('Was trying to rename to already '
@@ -129,7 +129,7 b' class RepoGroupModel(BaseModel):'
129 129 paths = os.sep.join(paths)
130 130
131 131 rm_path = os.path.join(self.repos_path, paths)
132 log.info("Removing group %s" % (rm_path))
132 log.info("Removing group %s", rm_path)
133 133 # delete only if that path really exists
134 134 if os.path.isdir(rm_path):
135 135 if force_delete:
@@ -235,8 +235,8 b' class RepoGroupModel(BaseModel):'
235 235
236 236 # start updates
237 237 updates = []
238 log.debug('Now updating permissions for %s in recursive mode:%s'
239 % (repo_group, recursive))
238 log.debug('Now updating permissions for %s in recursive mode:%s',
239 repo_group, recursive)
240 240
241 241 for obj in repo_group.recursive_groups_and_repos():
242 242 # iterated obj is an instance of a repos group or repository in
@@ -336,7 +336,7 b' class RepoGroupModel(BaseModel):'
336 336 self.sa.delete(repo_group)
337 337 self._delete_group(repo_group, force_delete)
338 338 except Exception:
339 log.error('Error removing repo_group %s' % repo_group)
339 log.error('Error removing repo_group %s', repo_group)
340 340 raise
341 341
342 342 def add_permission(self, repo_group, obj, obj_type, perm, recursive):
@@ -466,7 +466,7 b' class RepoGroupModel(BaseModel):'
466 466 obj.user = user
467 467 obj.permission = permission
468 468 self.sa.add(obj)
469 log.debug('Granted perm %s to %s on %s' % (perm, user, repo_group))
469 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
470 470 return obj
471 471
472 472 def revoke_user_permission(self, repo_group, user):
@@ -487,7 +487,7 b' class RepoGroupModel(BaseModel):'
487 487 .scalar()
488 488 if obj is not None:
489 489 self.sa.delete(obj)
490 log.debug('Revoked perm on %s on %s' % (repo_group, user))
490 log.debug('Revoked perm on %s on %s', repo_group, user)
491 491
492 492 def grant_user_group_permission(self, repo_group, group_name, perm):
493 493 """
@@ -518,7 +518,7 b' class RepoGroupModel(BaseModel):'
518 518 obj.users_group = group_name
519 519 obj.permission = permission
520 520 self.sa.add(obj)
521 log.debug('Granted perm %s to %s on %s' % (perm, group_name, repo_group))
521 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
522 522 return obj
523 523
524 524 def revoke_user_group_permission(self, repo_group, group_name):
@@ -539,4 +539,4 b' class RepoGroupModel(BaseModel):'
539 539 .scalar()
540 540 if obj is not None:
541 541 self.sa.delete(obj)
542 log.debug('Revoked perm to %s on %s' % (repo_group, group_name))
542 log.debug('Revoked perm to %s on %s', repo_group, group_name)
@@ -272,7 +272,7 b' class ScmModel(BaseModel):'
272 272 if repos_path is None:
273 273 repos_path = self.repos_path
274 274
275 log.info('scanning for repositories in %s' % repos_path)
275 log.info('scanning for repositories in %s', repos_path)
276 276
277 277 baseui = make_ui('db')
278 278 repos = {}
@@ -297,7 +297,7 b' class ScmModel(BaseModel):'
297 297 repos[name] = klass(path[1])
298 298 except OSError:
299 299 continue
300 log.debug('found %s paths with repositories' % (len(repos)))
300 log.debug('found %s paths with repositories', len(repos))
301 301 return repos
302 302
303 303 def get_repos(self, all_repos=None, sort_key=None, simple=False):
@@ -854,7 +854,7 b' class ScmModel(BaseModel):'
854 854 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
855 855 _hook_file = jn(loc, '%s-receive' % h_type)
856 856 has_hook = False
857 log.debug('Installing git hook in repo %s' % repo)
857 log.debug('Installing git hook in repo %s', repo)
858 858 if os.path.exists(_hook_file):
859 859 # let's take a look at this hook, maybe it's kallithea ?
860 860 log.debug('hook exists, checking if it is from kallithea')
@@ -865,7 +865,7 b' class ScmModel(BaseModel):'
865 865 if matches:
866 866 try:
867 867 ver = matches.groups()[0]
868 log.debug('got %s it is kallithea' % (ver))
868 log.debug('got %s it is kallithea', ver)
869 869 has_hook = True
870 870 except Exception:
871 871 log.error(traceback.format_exc())
@@ -874,14 +874,14 b' class ScmModel(BaseModel):'
874 874 has_hook = True
875 875
876 876 if has_hook or force_create:
877 log.debug('writing %s hook file !' % (h_type,))
877 log.debug('writing %s hook file !', h_type)
878 878 try:
879 879 with open(_hook_file, 'wb') as f:
880 880 tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
881 881 f.write(tmpl)
882 882 os.chmod(_hook_file, 0755)
883 883 except IOError as e:
884 log.error('error writing %s: %s' % (_hook_file, e))
884 log.error('error writing %s: %s', _hook_file, e)
885 885 else:
886 886 log.debug('skipping writing hook file')
887 887
@@ -125,14 +125,14 b' class UserModel(BaseModel):'
125 125 # raises UserCreationError if it's not allowed
126 126 check_allowed_create_user(user_data, cur_user)
127 127
128 log.debug('Checking for %s account in Kallithea database' % username)
128 log.debug('Checking for %s account in Kallithea database', username)
129 129 user = User.get_by_username(username, case_insensitive=True)
130 130 if user is None:
131 log.debug('creating new user %s' % username)
131 log.debug('creating new user %s', username)
132 132 new_user = User()
133 133 edit = False
134 134 else:
135 log.debug('updating user %s' % username)
135 log.debug('updating user %s', username)
136 136 new_user = user
137 137 edit = True
138 138
@@ -156,7 +156,7 b' class UserModel(BaseModel):'
156 156 not check_password(password, new_user.password)
157 157 if not edit or password_change:
158 158 reason = 'new password' if edit else 'new user'
159 log.debug('Updating password reason=>%s' % (reason,))
159 log.debug('Updating password reason=>%s', reason)
160 160 new_user.password = get_crypt_password(password) \
161 161 if password else None
162 162
@@ -279,7 +279,7 b' class UserModel(BaseModel):'
279 279 user_email = data['email']
280 280 user = User.get_by_email(user_email)
281 281 if user is not None:
282 log.debug('password reset user found %s' % user)
282 log.debug('password reset user found %s', user)
283 283 link = h.canonical_url('reset_password_confirmation',
284 284 key=user.api_key)
285 285 reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
@@ -294,9 +294,9 b' class UserModel(BaseModel):'
294 294 log.debug('sending email')
295 295 run_task(tasks.send_email, [user_email],
296 296 _("Password reset link"), body, html_body)
297 log.info('send new password mail to %s' % user_email)
297 log.info('send new password mail to %s', user_email)
298 298 else:
299 log.debug("password reset email %s not found" % user_email)
299 log.debug("password reset email %s not found", user_email)
300 300
301 301 return True
302 302
@@ -311,14 +311,14 b' class UserModel(BaseModel):'
311 311 user.password = auth.get_crypt_password(new_passwd)
312 312 Session().add(user)
313 313 Session().commit()
314 log.info('change password for %s' % user_email)
314 log.info('change password for %s', user_email)
315 315 if new_passwd is None:
316 316 raise Exception('unable to generate new password')
317 317
318 318 run_task(tasks.send_email, [user_email],
319 319 _('Your new password'),
320 320 _('Your new Kallithea password:%s') % (new_passwd,))
321 log.info('send new password mail to %s' % user_email)
321 log.info('send new password mail to %s', user_email)
322 322
323 323 return True
324 324
@@ -283,7 +283,7 b' class UserGroupModel(BaseModel):'
283 283 obj.user = user
284 284 obj.permission = permission
285 285 self.sa.add(obj)
286 log.debug('Granted perm %s to %s on %s' % (perm, user, user_group))
286 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
287 287 return obj
288 288
289 289 def revoke_user_permission(self, user_group, user):
@@ -304,7 +304,7 b' class UserGroupModel(BaseModel):'
304 304 .scalar()
305 305 if obj is not None:
306 306 self.sa.delete(obj)
307 log.debug('Revoked perm on %s on %s' % (user_group, user))
307 log.debug('Revoked perm on %s on %s', user_group, user)
308 308
309 309 def grant_user_group_permission(self, target_user_group, user_group, perm):
310 310 """
@@ -334,7 +334,7 b' class UserGroupModel(BaseModel):'
334 334 obj.target_user_group = target_user_group
335 335 obj.permission = permission
336 336 self.sa.add(obj)
337 log.debug('Granted perm %s to %s on %s' % (perm, target_user_group, user_group))
337 log.debug('Granted perm %s to %s on %s', perm, target_user_group, user_group)
338 338 return obj
339 339
340 340 def revoke_user_group_permission(self, target_user_group, user_group):
@@ -353,11 +353,11 b' class UserGroupModel(BaseModel):'
353 353 .scalar()
354 354 if obj is not None:
355 355 self.sa.delete(obj)
356 log.debug('Revoked perm on %s on %s' % (target_user_group, user_group))
356 log.debug('Revoked perm on %s on %s', target_user_group, user_group)
357 357
358 358 def enforce_groups(self, user, groups, extern_type=None):
359 359 user = self._get_user(user)
360 log.debug('Enforcing groups %s on user %s' % (user, groups))
360 log.debug('Enforcing groups %s on user %s', user, groups)
361 361 current_groups = user.group_member
362 362 # find the external created groups
363 363 externals = [x.users_group for x in current_groups
@@ -367,7 +367,7 b' class UserGroupModel(BaseModel):'
367 367 # externals that are not in groups
368 368 for gr in externals:
369 369 if gr.users_group_name not in groups:
370 log.debug('Removing user %s from user group %s' % (user, gr))
370 log.debug('Removing user %s from user group %s', user, gr)
371 371 self.remove_user_from_group(gr, user)
372 372
373 373 # now we calculate in which groups user should be == groups params
@@ -383,7 +383,7 b' class UserGroupModel(BaseModel):'
383 383 # we can only add users to special groups created via plugins
384 384 managed = 'extern_type' in existing_group.group_data
385 385 if managed:
386 log.debug('Adding user %s to user group %s' % (user, gr))
386 log.debug('Adding user %s to user group %s', user, gr)
387 387 UserGroupModel().add_user_to_group(existing_group, user)
388 388 else:
389 389 log.debug('Skipping addition to group %s since it is '
@@ -312,13 +312,13 b' def ValidAuth():'
312 312 if auth_modules.authenticate(username, password) is None:
313 313 user = User.get_by_username(username)
314 314 if user and not user.active:
315 log.warning('user %s is disabled' % username)
315 log.warning('user %s is disabled', username)
316 316 msg = M(self, 'invalid_auth', state)
317 317 raise formencode.Invalid(msg, value, state,
318 318 error_dict=dict(username=' ', password=msg)
319 319 )
320 320 else:
321 log.warning('user %s failed to authenticate' % username)
321 log.warning('user %s failed to authenticate', username)
322 322 msg = M(self, 'invalid_auth', state)
323 323 raise formencode.Invalid(msg, value, state,
324 324 error_dict=dict(username=' ', password=msg)
@@ -72,7 +72,7 b' class Command(object):'
72 72 """
73 73
74 74 command = cmd + ' ' + ' '.join(args)
75 log.debug('Executing %s' % command)
75 log.debug('Executing %s', command)
76 76 if DEBUG:
77 77 print command
78 78 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)