models: major update for python3,...
Author: super-admin
Commit: r5070:175fe6cb (default branch)
@@ -178,13 +178,14 b' class ChangesetStatusModel(BaseModel):'
178 178 """
179 179
180 180 def group_rule(element):
181 review_obj = element[0]
182 rule_data = review_obj.rule_user_group_data()
181 _review_obj = element[0]
182 rule_data = _review_obj.rule_user_group_data()
183 183 if rule_data and rule_data['id']:
184 184 return rule_data['id']
185 # don't return None, as we cant compare this
186 return 0
185 187
186 voting_groups = itertools.groupby(
187 sorted(statuses_by_reviewers, key=group_rule), group_rule)
188 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
188 189
189 190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
190 191
@@ -296,9 +296,6 b' class CommentsModel(BaseModel):'
296 296 :param extra_recipients: list of extra users to be added to recipients
297 297 """
298 298
299 if not text:
300 log.warning('Missing text for comment, skipping...')
301 return
302 299 request = get_current_request()
303 300 _ = request.translate
304 301
@@ -31,7 +31,7 b' import shutil'
31 31 from pyramid.threadlocal import get_current_request
32 32
33 33 from rhodecode.lib.utils2 import (
34 safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict)
34 unique_id, safe_int, safe_str, time_to_datetime, AttributeDict)
35 35 from rhodecode.lib.ext_json import json
36 36 from rhodecode.lib.vcs import VCSError
37 37 from rhodecode.model import BaseModel
@@ -121,7 +121,7 b' class GistModel(BaseModel):'
121 121 :param gist_acl_level: acl level for this gist
122 122 """
123 123 owner = self._get_user(owner)
124 gist_id = safe_unicode(gist_id or unique_id(20))
124 gist_id = safe_str(gist_id or unique_id(20))
125 125 lifetime = safe_int(lifetime, -1)
126 126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
127 127 expiration = (time_to_datetime(gist_expires)
@@ -133,13 +133,13 b' class GistModel(BaseModel):'
133 133 gist.gist_access_id = gist_id
134 134 gist.gist_owner = owner.user_id
135 135 gist.gist_expires = gist_expires
136 gist.gist_type = safe_unicode(gist_type)
136 gist.gist_type = safe_str(gist_type)
137 137 gist.acl_level = gist_acl_level
138 138 self.sa.add(gist)
139 139 self.sa.flush()
140 140 if gist_type == Gist.GIST_PUBLIC:
141 141 # use DB ID for easy to use GIST ID
142 gist_id = safe_unicode(gist.gist_id)
142 gist_id = safe_str(gist.gist_id)
143 143 gist.gist_access_id = gist_id
144 144 self.sa.add(gist)
145 145
@@ -152,7 +152,7 b' class GistModel(BaseModel):'
152 152 # now create single multifile commit
153 153 message = 'added file'
154 154 message += 's: ' if len(gist_mapping) > 1 else ': '
155 message += ', '.join([x for x in gist_mapping])
155 message += ', '.join([safe_str(x) for x in gist_mapping])
156 156
157 157 # fake RhodeCode Repository object
158 158 fake_repo = AttributeDict({
@@ -218,7 +218,7 b' class GistModel(BaseModel):'
218 218
219 219 message = 'updated file'
220 220 message += 's: ' if len(gist_mapping) > 1 else ': '
221 message += ', '.join([x for x in gist_mapping])
221 message += ', '.join([safe_str(x) for x in gist_mapping])
222 222
223 223 # fake RhodeCode Repository object
224 224 fake_repo = AttributeDict({
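Note on the gist hunks above: `safe_unicode` is replaced by `safe_str` throughout, since on Python 3 the native text type is `str`. A rough sketch of the coercion such a helper performs; the real implementation lives in `rhodecode.lib.str_utils` and may differ in details such as error handling:

```python
def safe_str(value, encoding='utf-8') -> str:
    """Coerce bytes or arbitrary objects to native str (illustrative sketch only)."""
    if isinstance(value, str):
        return value
    if isinstance(value, bytes):
        return value.decode(encoding, errors='replace')
    return str(value)

print(safe_str(b'gist-access-id'))   # 'gist-access-id'
print(safe_str(42))                  # '42' -- e.g. a DB id reused as the public gist id
```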
@@ -28,12 +28,11 b' import logging'
28 28
29 29 from sqlalchemy import or_, and_
30 30
31 import rhodecode
32 31 from rhodecode import events
33 32 from rhodecode.integrations.types.base import EEIntegration
34 33 from rhodecode.lib.caching_query import FromCache
35 34 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case
35 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
37 36 from rhodecode.integrations import integration_type_registry
38 37
39 38 log = logging.getLogger(__name__)
@@ -53,8 +52,7 b' class IntegrationModel(BaseModel):'
53 52 raise Exception('integration must be int or Instance'
54 53 ' of Integration got %s' % type(integration))
55 54
56 def create(self, IntegrationType, name, enabled, repo, repo_group,
57 child_repos_only, settings):
55 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
58 56 """ Create an IntegrationType integration """
59 57 integration = Integration()
60 58 integration.integration_type = IntegrationType.key
@@ -163,15 +161,15 b' class IntegrationModel(BaseModel):'
163 161 )
164 162
165 163 global_integrations_filter = and_(
166 Integration.repo_id == None,
167 Integration.repo_group_id == None,
164 Integration.repo_id == null(),
165 Integration.repo_group_id == null(),
168 166 Integration.child_repos_only == false(),
169 167 )
170 168
171 169 if isinstance(event, events.RepoEvent):
172 170 root_repos_integrations_filter = and_(
173 Integration.repo_id == None,
174 Integration.repo_group_id == None,
171 Integration.repo_id == null(),
172 Integration.repo_group_id == null(),
175 173 Integration.child_repos_only == true(),
176 174 )
177 175
@@ -225,7 +223,7 b' class IntegrationModel(BaseModel):'
225 223 query = query.order_by(order_by_criterion)
226 224
227 225 if cache:
228 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
226 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
229 227 query = query.options(
230 228 FromCache("sql_cache_short", cache_key))
231 229 else: # only global integrations
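Note on the integration hunks above: replacing `Integration.repo_id == None` with `== null()` keeps the emitted SQL identical (`IS NULL`) while using SQLAlchemy's explicit construct, and the cache key becomes an f-string. A hedged sketch of the filter against a minimal stand-in model (assuming SQLAlchemy 1.4+), not the real RhodeCode schema:

```python
from sqlalchemy import Column, Integer, Boolean, and_, false, null
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Integration(Base):  # minimal stand-in for rhodecode.model.db.Integration
    __tablename__ = 'integrations'
    integration_id = Column(Integer, primary_key=True)
    repo_id = Column(Integer, nullable=True)
    repo_group_id = Column(Integer, nullable=True)
    child_repos_only = Column(Boolean, default=False)

global_integrations_filter = and_(
    Integration.repo_id == null(),          # compiles to "repo_id IS NULL"
    Integration.repo_group_id == null(),
    Integration.child_repos_only == false(),
)
print(global_integrations_filter)
```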
@@ -332,6 +332,7 b" EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''"
332 332 import cssutils
333 333 # hijack css utils logger and replace with ours
334 334 log = logging.getLogger('rhodecode.cssutils.premailer')
335 log.setLevel(logging.INFO)
335 336 cssutils.log.setLog(log)
336 337
337 338
@@ -377,7 +378,10 b' class EmailNotificationModel(BaseModel):'
377 378 'rhodecode:templates/email_templates/pull_request_update.mako',
378 379 }
379 380
380 premailer_instance = premailer.Premailer()
381 premailer_instance = premailer.Premailer(
382 #cssutils_logging_handler=log.handlers[0],
383 #cssutils_logging_level=logging.INFO
384 )
381 385
382 386 def __init__(self):
383 387 """
@@ -180,16 +180,24 b' class PermissionModel(BaseModel):'
180 180
181 181 def _make_new_user_perm(self, user, perm_name):
182 182 log.debug('Creating new user permission:%s', perm_name)
183 new_perm = Permission.get_by_key(perm_name)
184 if not new_perm:
185 raise ValueError(f'permission with name {perm_name} not found')
186
183 187 new = UserToPerm()
184 188 new.user = user
185 new.permission = Permission.get_by_key(perm_name)
189 new.permission = new_perm
186 190 return new
187 191
188 192 def _make_new_user_group_perm(self, user_group, perm_name):
189 193 log.debug('Creating new user group permission:%s', perm_name)
194 new_perm = Permission.get_by_key(perm_name)
195 if not new_perm:
196 raise ValueError(f'permission with name {perm_name} not found')
197
190 198 new = UserGroupToPerm()
191 199 new.users_group = user_group
192 new.permission = Permission.get_by_key(perm_name)
200 new.permission = new_perm
193 201 return new
194 202
195 203 def _keep_perm(self, perm_name, keep_fields):
@@ -278,10 +286,10 b' class PermissionModel(BaseModel):'
278 286 raise ValueError('Missing permission for %s' % (_perm_key,))
279 287
280 288 if obj_type == 'user':
281 p = self._make_new_user_perm(object, perm_value)
289 p = self._make_new_user_perm(to_object, perm_value)
282 290 self.sa.add(p)
283 291 if obj_type == 'user_group':
284 p = self._make_new_user_group_perm(object, perm_value)
292 p = self._make_new_user_group_perm(to_object, perm_value)
285 293 self.sa.add(p)
286 294
287 295 def _set_new_user_perms(self, user, form_result, preserve=None):
@@ -321,8 +329,8 b' class PermissionModel(BaseModel):'
321 329 def _get_group(perm_name):
322 330 return '.'.join(perm_name.split('.')[:1])
323 331
324 defined_perms_groups = map(
325 _get_group, (x.permission.permission_name for x in obj_perms))
332 defined_perms_groups = list(map(
333 _get_group, (x.permission.permission_name for x in obj_perms)))
326 334 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
327 335
328 336 if force:
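Note on the hunk above: wrapping `map()` in `list()` is a genuine Python 3 change rather than style. `map` now returns a single-use iterator, so a later membership test or second pass over `defined_perms_groups` would silently run against an exhausted sequence. A small illustration with hypothetical permission names:

```python
def _get_group(perm_name):
    return '.'.join(perm_name.split('.')[:1])

perm_names = ['repository.read', 'repository.admin', 'usergroup.write']

lazy = map(_get_group, perm_names)
print('usergroup' in lazy)    # True, but the membership test consumed the whole iterator
print('repository' in lazy)   # False -- iterator exhausted, a classic Python 3 pitfall

defined_perms_groups = list(map(_get_group, perm_names))
print('repository' in defined_perms_groups)  # True
print('usergroup' in defined_perms_groups)   # True -- a list can be checked repeatedly
```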
@@ -23,15 +23,16 b''
23 23 pull request model for RhodeCode
24 24 """
25 25
26
27 import json
28 26 import logging
29 27 import os
30 28
31 29 import datetime
32 import urllib.request, urllib.parse, urllib.error
30 import urllib.request
31 import urllib.parse
32 import urllib.error
33 33 import collections
34 34
35 import dataclasses as dataclasses
35 36 from pyramid.threadlocal import get_current_request
36 37
37 38 from rhodecode.lib.vcs.nodes import FileNode
@@ -40,11 +41,12 b' from rhodecode.lib import helpers as h, '
40 41 from rhodecode.lib import audit_logger
41 42 from collections import OrderedDict
42 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.ext_json import sjson as json
43 45 from rhodecode.lib.markup_renderer import (
44 46 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.utils2 import (
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 get_current_rhodecode_user)
47 from rhodecode.lib.hash_utils import md5_safe
48 from rhodecode.lib.str_utils import safe_str
49 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 50 from rhodecode.lib.vcs.backends.base import (
49 51 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 52 TargetRefMissing, SourceRefMissing)
@@ -55,7 +57,7 b' from rhodecode.model import BaseModel'
55 57 from rhodecode.model.changeset_status import ChangesetStatusModel
56 58 from rhodecode.model.comment import CommentsModel
57 59 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 61 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 62 from rhodecode.model.meta import Session
61 63 from rhodecode.model.notification import NotificationModel, \
@@ -116,9 +118,8 b' def get_diff_info('
116 118 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 119 ignore_whitespace=False, context=3)
118 120
119 diff_processor = diffs.DiffProcessor(
120 vcs_diff, format='newdiff', diff_limit=None,
121 file_limit=None, show_full_diff=True)
121 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
122 diff_limit=0, file_limit=0, show_full_diff=True)
122 123
123 124 _parsed = diff_processor.prepare()
124 125
@@ -317,7 +318,7 b' class PullRequestModel(BaseModel):'
317 318 q = PullRequest.query()
318 319
319 320 if search_q:
320 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 like_expression = u'%{}%'.format(safe_str(search_q))
321 322 q = q.join(User, User.user_id == PullRequest.user_id)
322 323 q = q.filter(or_(
323 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
@@ -489,7 +490,7 b' class PullRequestModel(BaseModel):'
489 490 q = q.filter(pull_request_alias.status.in_(statuses))
490 491
491 492 if search_q:
492 like_expression = u'%{}%'.format(safe_unicode(search_q))
493 like_expression = u'%{}%'.format(safe_str(search_q))
493 494 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 495 q = q.filter(or_(
495 496 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
@@ -562,12 +563,14 b' class PullRequestModel(BaseModel):'
562 563 """
563 564 q = PullRequest.query()
564 565 if user_id:
565 reviewers_subquery = Session().query(
566 PullRequestReviewers.pull_request_id).filter(
567 PullRequestReviewers.user_id == user_id).subquery()
566
567 base_query = select(PullRequestReviewers)\
568 .where(PullRequestReviewers.user_id == user_id)\
569 .with_only_columns(PullRequestReviewers.pull_request_id)
570
568 571 user_filter = or_(
569 572 PullRequest.user_id == user_id,
570 PullRequest.pull_request_id.in_(reviewers_subquery)
573 PullRequest.pull_request_id.in_(base_query)
571 574 )
572 575 q = PullRequest.query().filter(user_filter)
573 576
@@ -576,7 +579,7 b' class PullRequestModel(BaseModel):'
576 579 q = q.filter(PullRequest.status.in_(statuses))
577 580
578 581 if query:
579 like_expression = u'%{}%'.format(safe_unicode(query))
582 like_expression = u'%{}%'.format(safe_str(query))
580 583 q = q.join(User, User.user_id == PullRequest.user_id)
581 584 q = q.filter(or_(
582 585 cast(PullRequest.pull_request_id, String).ilike(like_expression),
@@ -656,7 +659,7 b' class PullRequestModel(BaseModel):'
656 659 q = q.filter(pull_request_alias.status.in_(statuses))
657 660
658 661 if query:
659 like_expression = u'%{}%'.format(safe_unicode(query))
662 like_expression = u'%{}%'.format(safe_str(query))
660 663 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 664 q = q.filter(or_(
662 665 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
@@ -939,7 +942,8 b' class PullRequestModel(BaseModel):'
939 942
940 943 def merge_repo(self, pull_request, user, extras):
941 944 repo_type = pull_request.source_repo.repo_type
942 log.debug("Merging pull request %s", pull_request.pull_request_id)
945 log.debug("Merging pull request %s", pull_request)
946
943 947 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
944 948 merge_state = self._merge_pull_request(pull_request, user, extras)
945 949 if merge_state.executed:
@@ -952,14 +956,14 b' class PullRequestModel(BaseModel):'
952 956 user, pull_request)
953 957
954 958 else:
955 log.warn("Merge failed, not updating the pull request.")
959 log.warning("Merge failed, not updating the pull request.")
956 960 return merge_state
957 961
958 962 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
959 963 target_vcs = pull_request.target_repo.scm_instance()
960 964 source_vcs = pull_request.source_repo.scm_instance()
961 965
962 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
966 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
963 967 pr_id=pull_request.pull_request_id,
964 968 pr_title=pull_request.title,
965 969 pr_desc=pull_request.description,
@@ -995,6 +999,7 b' class PullRequestModel(BaseModel):'
995 999 user_name=user_name, user_email=user.email,
996 1000 message=message, use_rebase=use_rebase,
997 1001 close_branch=close_branch)
1002
998 1003 return merge_state
999 1004
1000 1005 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
@@ -1003,7 +1008,7 b' class PullRequestModel(BaseModel):'
1003 1008 close_msg = close_msg or 'Pull request merged and closed'
1004 1009
1005 1010 CommentsModel().create(
1006 text=safe_unicode(close_msg),
1011 text=safe_str(close_msg),
1007 1012 repo=pull_request.target_repo.repo_id,
1008 1013 user=user.user_id,
1009 1014 pull_request=pull_request.pull_request_id,
@@ -1289,9 +1294,10 b' class PullRequestModel(BaseModel):'
1289 1294 source_repo, source_ref_id, target_ref_id,
1290 1295 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 1296
1292 old_diff_data = diffs.DiffProcessor(old_diff)
1297 # NOTE: this was using diff_format='gitdiff'
1298 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1293 1299 old_diff_data.prepare()
1294 new_diff_data = diffs.DiffProcessor(new_diff)
1300 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1295 1301 new_diff_data.prepare()
1296 1302
1297 1303 return old_diff_data, new_diff_data
@@ -1598,7 +1604,7 b' class PullRequestModel(BaseModel):'
1598 1604 return None
1599 1605 else:
1600 1606 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1607 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1602 1608
1603 1609 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 1610 # notification to reviewers/observers
@@ -2032,8 +2038,8 b' class PullRequestModel(BaseModel):'
2032 2038 _ = translator or get_current_request().translate
2033 2039
2034 2040 commit_id = safe_str(commit_id) if commit_id else None
2035 branch = safe_unicode(branch) if branch else None
2036 bookmark = safe_unicode(bookmark) if bookmark else None
2041 branch = safe_str(branch) if branch else None
2042 bookmark = safe_str(bookmark) if bookmark else None
2037 2043
2038 2044 selected = None
2039 2045
@@ -2072,8 +2078,8 b' class PullRequestModel(BaseModel):'
2072 2078 u'No commit refs could be found matching: {}'.format(ref))
2073 2079 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 2080 selected = u'branch:{}:{}'.format(
2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2081 safe_str(repo.DEFAULT_BRANCH_NAME),
2082 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 2083 )
2078 2084 elif repo.commit_ids:
2079 2085 # make the user select in this case
@@ -2113,7 +2119,7 b' class PullRequestModel(BaseModel):'
2113 2119 log.debug('calculating diff between '
2114 2120 'source_ref:%s and target_ref:%s for repo `%s`',
2115 2121 target_ref_id, source_ref_id,
2116 safe_unicode(vcs_repo.path))
2122 safe_str(vcs_repo.path))
2117 2123
2118 2124 vcs_diff = vcs_repo.get_diff(
2119 2125 commit1=target_commit, commit2=source_commit,
@@ -2373,8 +2379,16 b' class MergeCheck(object):'
2373 2379 return merge_details
2374 2380
2375 2381
2376 ChangeTuple = collections.namedtuple(
2377 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2382 @dataclasses.dataclass
2383 class ChangeTuple:
2384 added: list
2385 common: list
2386 removed: list
2387 total: list
2378 2388
2379 FileChangeTuple = collections.namedtuple(
2380 'FileChangeTuple', ['added', 'modified', 'removed'])
2389
2390 @dataclasses.dataclass
2391 class FileChangeTuple:
2392 added: list
2393 modified: list
2394 removed: list
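Note on the hunk above: `ChangeTuple` and `FileChangeTuple` become dataclasses. Unlike namedtuples, dataclass instances are not indexable or unpackable by default, so call sites must use attribute access; construction and dict conversion look like this (illustrative values):

```python
import dataclasses

@dataclasses.dataclass
class FileChangeTuple:
    added: list
    modified: list
    removed: list

fc = FileChangeTuple(added=['a.py'], modified=[], removed=['b.py'])
print(fc.added, fc.removed)        # attribute access works as before
# unlike a namedtuple, tuple unpacking is gone:
# added, modified, removed = fc    # would raise TypeError
print(dataclasses.asdict(fc))      # {'added': ['a.py'], 'modified': [], 'removed': ['b.py']}
```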
@@ -26,6 +26,7 b' import traceback'
26 26 import datetime
27 27
28 28 from pyramid.threadlocal import get_current_request
29 from sqlalchemy.orm import aliased
29 30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 31
31 32 from rhodecode import events
@@ -36,7 +37,7 b' from rhodecode.lib import hooks_base'
36 37 from rhodecode.lib.user_log_filter import user_log_filter
37 38 from rhodecode.lib.utils import make_db_config
38 39 from rhodecode.lib.utils2 import (
39 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, remove_prefix, obfuscate_url_pw,
40 41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 42 from rhodecode.lib.vcs.backends import get_backend
42 43 from rhodecode.model import BaseModel
@@ -78,7 +79,7 b' class RepoModel(BaseModel):'
78 79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 80
80 81 repo_to_perm.repository = repository
81 repo_to_perm.user_id = def_user.user_id
82 repo_to_perm.user = def_user
82 83
83 84 return repo_to_perm
84 85
@@ -112,7 +113,7 b' class RepoModel(BaseModel):'
112 113 def _extract_id_from_repo_name(self, repo_name):
113 114 if repo_name.startswith('/'):
114 115 repo_name = repo_name.lstrip('/')
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 by_id_match = re.match(r'^_(\d+)', repo_name)
116 117 if by_id_match:
117 118 return by_id_match.groups()[0]
118 119
@@ -138,7 +139,7 b' class RepoModel(BaseModel):'
138 139
139 140 def get_repos_for_root(self, root, traverse=False):
140 141 if traverse:
141 like_expression = u'{}%'.format(safe_unicode(root))
142 like_expression = u'{}%'.format(safe_str(root))
142 143 repos = Repository.query().filter(
143 144 Repository.repo_name.like(like_expression)).all()
144 145 else:
@@ -209,12 +210,12 b' class RepoModel(BaseModel):'
209 210 def quick_menu(repo_name):
210 211 return _render('quick_menu', repo_name)
211 212
212 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
213 214 if short_name is not None:
214 215 short_name_var = short_name
215 216 else:
216 217 short_name_var = not admin
217 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
218 219 short_name=short_name_var, admin=False)
219 220
220 221 def last_change(last_change):
@@ -259,7 +260,7 b' class RepoModel(BaseModel):'
259 260 "menu": quick_menu(repo.repo_name),
260 261
261 262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
262 repo.private, repo.archived, repo.fork),
263 repo.private, repo.archived, repo.fork_repo_name),
263 264
264 265 "desc": desc(h.escape(repo.description)),
265 266
@@ -268,7 +269,7 b' class RepoModel(BaseModel):'
268 269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
269 270 "last_changeset_raw": changeset_cache.get('revision'),
270 271
271 "owner": user_profile(repo.User.username),
272 "owner": user_profile(repo.owner_username),
272 273
273 274 "state": state(repo.repo_state),
274 275 "rss": rss_lnk(repo.repo_name),
@@ -309,6 +310,8 b' class RepoModel(BaseModel):'
309 310 ) \
310 311 .count()
311 312
313 RepoFork = aliased(Repository)
314 OwnerUser = aliased(User)
312 315 base_q = Session.query(
313 316 Repository.repo_id,
314 317 Repository.repo_name,
@@ -317,18 +320,18 b' class RepoModel(BaseModel):'
317 320 Repository.repo_state,
318 321 Repository.private,
319 322 Repository.archived,
320 Repository.fork,
321 323 Repository.updated_on,
322 324 Repository._changeset_cache,
323 User,
325 RepoFork.repo_name.label('fork_repo_name'),
326 OwnerUser.username.label('owner_username'),
324 327 ) \
325 328 .filter(Repository.group_id == repo_group_id) \
326 329 .filter(or_(
327 330 # generate multiple IN to fix limitation problems
328 331 *in_filter_generator(Repository.repo_id, allowed_ids))
329 332 ) \
330 .join(User, User.user_id == Repository.user_id) \
331 .group_by(Repository, User)
333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
332 335
333 336 repos_data_total_filtered_count = base_q.count()
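Note on the hunk above: the repo listing no longer selects the full `User` entity and the `Repository.fork` relationship; it joins two `aliased()` entities and pulls only labelled scalar columns, so each result row is a lightweight tuple instead of mapped objects. A hedged sketch of the join shape with minimal stand-in models:

```python
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import declarative_base, aliased, Session

Base = declarative_base()

class User(Base):  # minimal stand-ins, not the RhodeCode schema
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    username = Column(String)

class Repository(Base):
    __tablename__ = 'repositories'
    repo_id = Column(Integer, primary_key=True)
    repo_name = Column(String)
    user_id = Column(Integer, ForeignKey('users.user_id'))
    fork_id = Column(Integer, ForeignKey('repositories.repo_id'))

RepoFork = aliased(Repository)   # self-join target for the fork's name
OwnerUser = aliased(User)

base_q = (
    Session().query(
        Repository.repo_id,
        Repository.repo_name,
        RepoFork.repo_name.label('fork_repo_name'),
        OwnerUser.username.label('owner_username'),
    )
    .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id)
    .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
)
print(base_q)  # compiles with the default dialect, no bound engine required
```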
334 337
@@ -515,8 +518,8 b' class RepoModel(BaseModel):'
515 518 landing_rev = landing_rev or default_landing_ref
516 519
517 520 try:
518 repo_name = safe_unicode(repo_name)
519 description = safe_unicode(description)
521 repo_name = safe_str(repo_name)
522 description = safe_str(description)
520 523 # repo name is just a name of repository
521 524 # while repo_name_full is a full qualified name that is combined
522 525 # with name and path of group
@@ -979,14 +982,14 b' class RepoModel(BaseModel):'
979 982
980 983 # check if this path is not a repository
981 984 if is_valid_repo(repo_path, self.repos_path):
982 raise Exception('This path %s is a valid repository' % repo_path)
985 raise Exception(f'This path {repo_path} is a valid repository')
983 986
984 987 # check if this path is a group
985 988 if is_valid_repo_group(repo_path, self.repos_path):
986 raise Exception('This path %s is a valid group' % repo_path)
989 raise Exception(f'This path {repo_path} is a valid group')
987 990
988 991 log.info('creating repo %s in %s from url: `%s`',
989 repo_name, safe_unicode(repo_path),
992 repo_name, safe_str(repo_path),
990 993 obfuscate_url_pw(clone_uri))
991 994
992 995 backend = get_backend(repo_type)
@@ -1016,7 +1019,7 b' class RepoModel(BaseModel):'
1016 1019 repo.install_hooks()
1017 1020
1018 1021 log.debug('Created repo %s with %s backend',
1019 safe_unicode(repo_name), safe_unicode(repo_type))
1022 safe_str(repo_name), safe_str(repo_type))
1020 1023 return repo
1021 1024
1022 1025 def _rename_filesystem_repo(self, old, new):
@@ -1038,8 +1041,8 b' class RepoModel(BaseModel):'
1038 1041
1039 1042 def _delete_filesystem_repo(self, repo):
1040 1043 """
1041 removes repo from filesystem, the removal is acctually made by
1042 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1044 removes repo from filesystem, the removal is actually made by
1045 added rm__ prefix into dir, and rename internal .hg/.git dirs so this
1043 1046 repository is no longer valid for rhodecode, can be undeleted later on
1044 1047 by reverting the renames on this repository
1045 1048
@@ -1047,7 +1050,7 b' class RepoModel(BaseModel):'
1047 1050 """
1048 1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1049 1052 repo_group = repo.group
1050 log.info("Removing repository %s", rm_path)
1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1051 1054 # disable hg/git internal that it doesn't get detected as repo
1052 1055 alias = repo.repo_type
1053 1056
@@ -1094,19 +1097,19 b' class ReadmeFinder:'
1094 1097 path_re = re.compile(r'^docs?', re.IGNORECASE)
1095 1098
1096 1099 default_priorities = {
1097 None: 0,
1098 '.text': 2,
1099 '.txt': 3,
1100 '.rst': 1,
1101 '.rest': 2,
1102 '.md': 1,
1103 '.mkdn': 2,
1104 '.mdown': 3,
1100 None: 0,
1101 '.rst': 1,
1102 '.md': 1,
1103 '.rest': 2,
1104 '.mkdn': 2,
1105 '.text': 2,
1106 '.txt': 3,
1107 '.mdown': 3,
1105 1108 '.markdown': 4,
1106 1109 }
1107 1110
1108 1111 path_priority = {
1109 'doc': 0,
1112 'doc': 0,
1110 1113 'docs': 1,
1111 1114 }
1112 1115
@@ -1122,7 +1125,7 b' class ReadmeFinder:'
1122 1125 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1123 1126 default_renderer, [])
1124 1127
1125 def search(self, commit, path=u'/'):
1128 def search(self, commit, path='/'):
1126 1129 """
1127 1130 Find a readme in the given `commit`.
1128 1131 """
@@ -131,7 +131,7 b' class RepoGroupModel(BaseModel):'
131 131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
132 132
133 133 repo_group_to_perm.group = new_group
134 repo_group_to_perm.user_id = def_user.user_id
134 repo_group_to_perm.user = def_user
135 135 return repo_group_to_perm
136 136
137 137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
@@ -780,10 +780,10 b' class RepoGroupModel(BaseModel):'
780 780 }
781 781 if admin:
782 782 repo_count = group.repositories.count()
783 children_groups = map(
784 h.safe_unicode,
783 children_groups = list(map(
784 h.safe_str,
785 785 itertools.chain((g.name for g in group.parents),
786 (x.name for x in [group])))
786 (x.name for x in [group]))))
787 787 row.update({
788 788 "action": repo_group_actions(
789 789 group.group_id, group.group_name, repo_count),
@@ -30,6 +30,7 b' from sqlalchemy import func'
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 import rhodecode
33 from rhodecode.lib.str_utils import safe_bytes
33 34 from rhodecode.lib.vcs import get_backend
34 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 36 from rhodecode.lib.vcs.nodes import FileNode
@@ -42,7 +43,7 b' from rhodecode.lib.exceptions import Non'
42 43 from rhodecode.lib import hooks_utils
43 44 from rhodecode.lib.utils import (
44 45 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.str_utils import safe_str
46 47 from rhodecode.lib.system_info import get_system_info
47 48 from rhodecode.model import BaseModel
48 49 from rhodecode.model.db import (
@@ -284,8 +285,7 b' class ScmModel(BaseModel):'
284 285 repo.update_commit_cache(config=config, cs_cache=None)
285 286 if delete:
286 287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
287 rc_cache.clear_cache_namespace(
288 'cache_repo', cache_namespace_uid, invalidate=True)
288 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
289 289
290 290 def toggle_following_repo(self, follow_repo_id, user_id):
291 291
@@ -443,25 +443,18 b' class ScmModel(BaseModel):'
443 443 raise
444 444
445 445 def commit_change(self, repo, repo_name, commit, user, author, message,
446 content, f_path):
446 content: bytes, f_path: bytes):
447 447 """
448 448 Commits changes
449
450 :param repo: SCM instance
451
452 449 """
453 450 user = self._get_user(user)
454 451
455 # decoding here will force that we have proper encoded values
456 # in any other case this will throw exceptions and deny commit
457 content = safe_str(content)
458 path = safe_str(f_path)
459 452 # message and author needs to be unicode
460 453 # proper backend should then translate that into required type
461 message = safe_unicode(message)
462 author = safe_unicode(author)
454 message = safe_str(message)
455 author = safe_str(author)
463 456 imc = repo.in_memory_commit
464 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
457 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
465 458 try:
466 459 # TODO: handle pre-push action !
467 460 tip = imc.commit(
@@ -480,9 +473,9 b' class ScmModel(BaseModel):'
480 473 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
481 474 return tip
482 475
483 def _sanitize_path(self, f_path):
484 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
485 raise NonRelativePathError('%s is not an relative path' % f_path)
476 def _sanitize_path(self, f_path: bytes):
477 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
478 raise NonRelativePathError(b'%b is not an relative path' % f_path)
486 479 if f_path:
487 480 f_path = os.path.normpath(f_path)
488 481 return f_path
@@ -531,15 +524,24 b' class ScmModel(BaseModel):'
531 524 """
532 525 _files = list()
533 526 _dirs = list()
527
534 528 try:
535 529 _repo = self._get_repo(repo_name)
536 530 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
537 531 root_path = root_path.lstrip('/')
538 for __, dirs, files in commit.walk(root_path):
532
533 # get RootNode, inject pre-load options before walking
534 top_node = commit.get_node(root_path)
535 extended_info_pre_load = []
536 if extended_info:
537 extended_info_pre_load += ['md5']
538 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
539
540 for __, dirs, files in commit.walk(top_node):
539 541
540 542 for f in files:
541 543 _content = None
542 _data = f_name = f.unicode_path
544 _data = f_name = f.str_path
543 545
544 546 if not flat:
545 547 _data = {
@@ -561,7 +563,7 b' class ScmModel(BaseModel):'
561 563 and f.size > max_file_bytes)
562 564 full_content = None
563 565 if not f.is_binary and not over_size_limit:
564 full_content = safe_str(f.content)
566 full_content = f.str_content
565 567
566 568 _data.update({
567 569 "content": full_content,
@@ -569,7 +571,7 b' class ScmModel(BaseModel):'
569 571 _files.append(_data)
570 572
571 573 for d in dirs:
572 _data = d_name = d.unicode_path
574 _data = d_name = d.str_path
573 575 if not flat:
574 576 _data = {
575 577 "name": h.escape(d_name),
@@ -577,10 +579,10 b' class ScmModel(BaseModel):'
577 579 }
578 580 if extended_info:
579 581 _data.update({
580 "md5": None,
581 "binary": None,
582 "size": None,
583 "extension": None,
582 "md5": "",
583 "binary": False,
584 "size": 0,
585 "extension": "",
584 586 })
585 587 if content:
586 588 _data.update({
@@ -609,7 +611,7 b' class ScmModel(BaseModel):'
609 611 for f in files:
610 612
611 613 _data = {
612 "name": h.escape(f.unicode_path),
614 "name": h.escape(f.str_path),
613 615 "type": "file",
614 616 }
615 617
@@ -618,7 +620,7 b' class ScmModel(BaseModel):'
618 620 for d in dirs:
619 621
620 622 _data = {
621 "name": h.escape(d.unicode_path),
623 "name": h.escape(d.str_path),
622 624 "type": "dir",
623 625 }
624 626
@@ -634,6 +636,7 b' class ScmModel(BaseModel):'
634 636 """
635 637 retrieve single node from commit
636 638 """
639
637 640 try:
638 641
639 642 _repo = self._get_repo(repo_name)
@@ -644,7 +647,7 b' class ScmModel(BaseModel):'
644 647 raise RepositoryError('The given path is a directory')
645 648
646 649 _content = None
647 f_name = file_node.unicode_path
650 f_name = file_node.str_path
648 651
649 652 file_data = {
650 653 "name": h.escape(f_name),
@@ -677,7 +680,7 b' class ScmModel(BaseModel):'
677 680 full_content = None
678 681 all_lines = 0
679 682 if not file_node.is_binary and not over_size_limit:
680 full_content = safe_unicode(file_node.content)
683 full_content = safe_str(file_node.content)
681 684 all_lines, empty_lines = file_node.count_lines(full_content)
682 685
683 686 file_data.update({
@@ -693,7 +696,7 b' class ScmModel(BaseModel):'
693 696 full_content = None
694 697 all_lines = 0
695 698 if not is_binary and not over_size_limit:
696 full_content = safe_unicode(_content)
699 full_content = safe_str(_content)
697 700 all_lines, empty_lines = file_node.count_lines(full_content)
698 701
699 702 file_data.update({
@@ -718,12 +721,15 b' class ScmModel(BaseModel):'
718 721 _repo = self._get_repo(repo_name)
719 722 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 723 root_path = root_path.lstrip('/')
721 for __, dirs, files in commit.walk(root_path):
724 top_node = commit.get_node(root_path)
725 top_node.default_pre_load = []
726
727 for __, dirs, files in commit.walk(top_node):
722 728
723 729 for f in files:
724 730 is_binary, md5, size, _content = f.metadata_uncached()
725 731 _data = {
726 "name": f.unicode_path,
732 "name": f.str_path,
727 733 "md5": md5,
728 734 "extension": f.extension,
729 735 "binary": is_binary,
@@ -759,26 +765,9 b' class ScmModel(BaseModel):'
759 765 user = self._get_user(user)
760 766 scm_instance = repo.scm_instance(cache=False)
761 767
762 processed_nodes = []
763 for f_path in nodes:
764 f_path = self._sanitize_path(f_path)
765 content = nodes[f_path]['content']
766 f_path = safe_str(f_path)
767 # decoding here will force that we have proper encoded values
768 # in any other case this will throw exceptions and deny commit
769 if isinstance(content, (str,)):
770 content = safe_str(content)
771 elif isinstance(content, (file, cStringIO.OutputType,)):
772 content = content.read()
773 else:
774 raise Exception('Content is of unrecognized type %s' % (
775 type(content)
776 ))
777 processed_nodes.append((f_path, content))
778
779 message = safe_unicode(message)
768 message = safe_str(message)
780 769 commiter = user.full_contact
781 author = safe_unicode(author) if author else commiter
770 author = safe_str(author) if author else commiter
782 771
783 772 imc = scm_instance.in_memory_commit
784 773
@@ -786,13 +775,39 b' class ScmModel(BaseModel):'
786 775 parent_commit = EmptyCommit(alias=scm_instance.alias)
787 776
788 777 if isinstance(parent_commit, EmptyCommit):
789 # EmptyCommit means we we're editing empty repository
778 # EmptyCommit means we're editing empty repository
790 779 parents = None
791 780 else:
792 781 parents = [parent_commit]
782
783 upload_file_types = (io.BytesIO, io.BufferedRandom)
784 processed_nodes = []
785 for filename, content_dict in nodes.items():
786 if not isinstance(filename, bytes):
787 raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
788 content = content_dict['content']
789 if not isinstance(content, upload_file_types + (bytes,)):
790 raise ValueError('content key value in nodes needs to be bytes')
791
792 for f_path in nodes:
793 f_path = self._sanitize_path(f_path)
794 content = nodes[f_path]['content']
795
796 # decoding here will force that we have proper encoded values
797 # in any other case this will throw exceptions and deny commit
798
799 if isinstance(content, bytes):
800 pass
801 elif isinstance(content, upload_file_types):
802 content = content.read()
803 else:
804 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
805 processed_nodes.append((f_path, content))
806
793 807 # add multiple nodes
794 808 for path, content in processed_nodes:
795 809 imc.add(FileNode(path, content=content))
810
796 811 # TODO: handle pre push scenario
797 812 tip = imc.commit(message=message,
798 813 author=author,
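Note on the hunk above: node handling now insists on `bytes` filenames and `bytes` or binary file-like content (`io.BytesIO`, `io.BufferedRandom`). A condensed, hedged sketch of that validation step as a standalone function, not the actual RhodeCode method:

```python
import io

upload_file_types = (io.BytesIO, io.BufferedRandom)

def normalize_nodes(nodes: dict) -> list:
    """Flatten a {filename: {'content': ...}} mapping into (bytes, bytes) pairs (sketch)."""
    processed = []
    for filename, content_dict in nodes.items():
        if not isinstance(filename, bytes):
            raise ValueError(f'filename key in nodes needs to be bytes, got {type(filename)}')
        content = content_dict['content']
        if isinstance(content, bytes):
            pass
        elif isinstance(content, upload_file_types):
            content = content.read()
        else:
            raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
        processed.append((filename, content))
    return processed

nodes = {b'docs/readme.md': {'content': b'# hello'},
         b'data.bin': {'content': io.BytesIO(b'\x00\x01')}}
print(normalize_nodes(nodes))
```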
@@ -813,9 +828,9 b' class ScmModel(BaseModel):'
813 828 user = self._get_user(user)
814 829 scm_instance = repo.scm_instance(cache=False)
815 830
816 message = safe_unicode(message)
831 message = safe_str(message)
817 832 commiter = user.full_contact
818 author = safe_unicode(author) if author else commiter
833 author = safe_str(author) if author else commiter
819 834
820 835 imc = scm_instance.in_memory_commit
821 836
@@ -897,14 +912,14 b' class ScmModel(BaseModel):'
897 912 processed_nodes = []
898 913 for f_path in nodes:
899 914 f_path = self._sanitize_path(f_path)
900 # content can be empty but for compatabilty it allows same dicts
915 # content can be empty but for compatibility it allows same dicts
901 916 # structure as add_nodes
902 917 content = nodes[f_path].get('content')
903 processed_nodes.append((f_path, content))
918 processed_nodes.append((safe_bytes(f_path), content))
904 919
905 message = safe_unicode(message)
920 message = safe_str(message)
906 921 commiter = user.full_contact
907 author = safe_unicode(author) if author else commiter
922 author = safe_str(author) if author else commiter
908 923
909 924 imc = scm_instance.in_memory_commit
910 925
@@ -994,7 +1009,7 b' class ScmModel(BaseModel):'
994 1009 choices = [default_landing_ref]
995 1010
996 1011 # branches
997 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
1012 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
998 1013 if not branch_group:
999 1014 # new repo, or without maybe a branch?
1000 1015 branch_group = default_ref_options
@@ -1006,7 +1021,7 b' class ScmModel(BaseModel):'
1006 1021 # bookmarks for HG
1007 1022 if repo.alias == 'hg':
1008 1023 bookmarks_group = (
1009 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1024 [(f'book:{safe_str(b)}', safe_str(b))
1010 1025 for b in repo.bookmarks],
1011 1026 _("Bookmarks"))
1012 1027 ref_options.append(bookmarks_group)
@@ -1014,7 +1029,7 b' class ScmModel(BaseModel):'
1014 1029
1015 1030 # tags
1016 1031 tags_group = (
1017 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1032 [(f'tag:{safe_str(t)}', safe_str(t))
1018 1033 for t in repo.tags],
1019 1034 _("Tags"))
1020 1035 ref_options.append(tags_group)
@@ -19,16 +19,16 b''
19 19
20 20 import os
21 21 import re
22 import hashlib
23 22 import logging
24 23 import time
25 24 import functools
26 25 import bleach
27 26 from collections import namedtuple
28 27
29 from pyramid.threadlocal import get_current_request, get_current_registry
28 from pyramid.threadlocal import get_current_request
30 29
31 30 from rhodecode.lib import rc_cache
31 from rhodecode.lib.hash_utils import sha1_safe
32 32 from rhodecode.lib.utils2 import (
33 33 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
34 34 from rhodecode.lib.vcs.backends import base
@@ -132,10 +132,9 b' class SettingsModel(BaseModel):'
132 132 if not key:
133 133 # keys are unique so they need appended info
134 134 if self.repo:
135 key = hashlib.sha1(
136 '{}{}{}'.format(section, val, repository_id)).hexdigest()
135 key = sha1_safe(f'{section}{val}{repository_id}')
137 136 else:
138 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
137 key = sha1_safe(f'{section}{val}')
139 138
140 139 new_ui.ui_key = key
141 140
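Note on the hunk above: the move from `hashlib.sha1('...').hexdigest()` to `sha1_safe(...)` is driven by Python 3, where `hashlib` only accepts bytes. A sketch of what such a helper typically does; the real one lives in `rhodecode.lib.hash_utils` and may differ:

```python
import hashlib

def sha1_safe(value) -> str:
    """Hash str or bytes alike; a sketch, assuming utf-8 for text input."""
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.sha1(value).hexdigest()

section, val, repository_id = 'hooks', 'changegroup.update', 7
key = sha1_safe(f'{section}{val}{repository_id}')
print(key)  # stable 40-char hex key for the settings entry
```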
@@ -212,30 +211,20 b' class SettingsModel(BaseModel):'
212 211
213 212 def get_cache_region(self):
214 213 repo = self._get_repo(self.repo) if self.repo else None
215 cache_key = "repo.{}".format(repo.repo_id) if repo else "general_settings"
216 cache_namespace_uid = 'cache_settings.{}'.format(cache_key)
214 cache_key = f"repo.{repo.repo_id}" if repo else "repo.ALL"
215 cache_namespace_uid = f'cache_settings.{cache_key}'
217 216 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
218 return region, cache_key
219
220 def invalidate_settings_cache(self):
221 region, cache_key = self.get_cache_region()
222 log.debug('Invalidation cache region %s for cache_key: %s', region, cache_key)
223 region.invalidate()
217 return region, cache_namespace_uid
224 218
225 def get_all_settings(self, cache=False, from_request=True):
226 # defines if we use GLOBAL, or PER_REPO
227 repo = self._get_repo(self.repo) if self.repo else None
219 def invalidate_settings_cache(self, hard=False):
220 region, namespace_key = self.get_cache_region()
221 log.debug('Invalidation cache [%s] region %s for cache_key: %s',
222 'invalidate_settings_cache', region, namespace_key)
228 223
229 # initially try the requests context, this is the fastest
230 # we only fetch global config
231 if from_request:
232 request = get_current_request()
224 # we use hard cleanup if invalidation is sent
225 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
233 226
234 if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
235 rc_config = request.call_context.rc_config
236 if rc_config:
237 return rc_config
238
227 def get_cache_call_method(self, cache=True):
239 228 region, cache_key = self.get_cache_region()
240 229
241 230 @region.conditional_cache_on_arguments(condition=cache)
@@ -245,10 +234,28 b' class SettingsModel(BaseModel):'
245 234 raise Exception('Could not get application settings !')
246 235
247 236 settings = {
248 'rhodecode_' + res.app_settings_name: res.app_settings_value
237 f'rhodecode_{res.app_settings_name}': res.app_settings_value
249 238 for res in q
250 239 }
251 240 return settings
241 return _get_all_settings
242
243 def get_all_settings(self, cache=False, from_request=True):
244 # defines if we use GLOBAL, or PER_REPO
245 repo = self._get_repo(self.repo) if self.repo else None
246
247 # initially try the requests context, this is the fastest
248 # we only fetch global config, NOT for repo-specific
249 if from_request and not repo:
250 request = get_current_request()
251
252 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
253 rc_config = request.call_context.rc_config
254 if rc_config:
255 return rc_config
256
257 _region, cache_key = self.get_cache_region()
258 _get_all_settings = self.get_cache_call_method(cache=cache)
252 259
253 260 start = time.time()
254 261 result = _get_all_settings('rhodecode_settings', cache_key)
@@ -318,8 +325,7 b' class SettingsModel(BaseModel):'
318 325 def list_enabled_social_plugins(self, settings):
319 326 enabled = []
320 327 for plug in SOCIAL_PLUGINS_LIST:
321 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
322 )):
328 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
323 329 enabled.append(plug)
324 330 return enabled
325 331
@@ -28,6 +28,7 b' from cryptography.hazmat.primitives.asym'
28 28 from cryptography.hazmat.primitives import serialization as crypto_serialization
29 29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
30 30
31 from rhodecode.lib.str_utils import safe_bytes, safe_str
31 32 from rhodecode.model import BaseModel
32 33 from rhodecode.model.db import UserSshKeys
33 34 from rhodecode.model.meta import Session
@@ -85,10 +86,13 b' class SshKeyModel(BaseModel):'
85 86 crypto_serialization.Encoding.PEM,
86 87 private_format,
87 88 crypto_serialization.NoEncryption())
89 private_key = safe_str(private_key)
90
88 91 public_key = key.public_key().public_bytes(
89 92 crypto_serialization.Encoding.OpenSSH,
90 93 crypto_serialization.PublicFormat.OpenSSH
91 94 )
95 public_key = safe_str(public_key)
92 96
93 97 if comment:
94 98 public_key = public_key + " " + comment
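Note on the hunk above: `private_bytes()`/`public_bytes()` from the `cryptography` package return `bytes`, so concatenating a `str` comment fails on Python 3 until the values are decoded, which is what the added `safe_str` calls do. A standalone sketch (key size and comment are illustrative; recent `cryptography` releases no longer need an explicit backend argument):

```python
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization as crypto_serialization

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

private_key = key.private_bytes(
    crypto_serialization.Encoding.PEM,
    crypto_serialization.PrivateFormat.TraditionalOpenSSL,
    crypto_serialization.NoEncryption()).decode('utf-8')

public_key = key.public_key().public_bytes(
    crypto_serialization.Encoding.OpenSSH,
    crypto_serialization.PublicFormat.OpenSSH).decode('utf-8')

comment = 'rhodecode-ssh-key'
public_key = public_key + ' ' + comment  # works now that both sides are str
print(public_key[:40], '...')
```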
@@ -19,7 +19,9 b''
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request
23 import urllib.error
24 import urllib.parse
23 25 from packaging.version import Version
24 26
25 27 import rhodecode
@@ -32,8 +32,9 b' from sqlalchemy.exc import DatabaseError'
32 32 from rhodecode import events
33 33 from rhodecode.lib.user_log_filter import user_log_filter
34 34 from rhodecode.lib.utils2 import (
35 safe_unicode, get_current_rhodecode_user, action_logger_generic,
35 get_current_rhodecode_user, action_logger_generic,
36 36 AttributeDict, str2bool)
37 from rhodecode.lib.str_utils import safe_str
37 38 from rhodecode.lib.exceptions import (
38 39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 40 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
@@ -87,7 +88,7 b' class UserModel(BaseModel):'
87 88 query = query.filter(User.active == true())
88 89
89 90 if name_contains:
90 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 ilike_expression = u'%{}%'.format(safe_str(name_contains))
91 92 query = query.filter(
92 93 or_(
93 94 User.name.ilike(ilike_expression),
@@ -358,8 +359,8 b' class UserModel(BaseModel):'
358 359 new_user.admin = admin
359 360 new_user.email = email
360 361 new_user.active = active
361 new_user.extern_name = safe_unicode(extern_name)
362 new_user.extern_type = safe_unicode(extern_type)
362 new_user.extern_name = safe_str(extern_name)
363 new_user.extern_type = safe_str(extern_type)
363 364 new_user.name = firstname
364 365 new_user.lastname = lastname
365 366 new_user.description = description
@@ -533,7 +534,7 b' class UserModel(BaseModel):'
533 534
534 535 left_overs = False
535 536
536 # if nothing is done we have left overs left
537 # if nothing is done we have leftovers left
537 538 return left_overs
538 539
539 540 def _handle_user_artifacts(self, username, artifacts, handle_user,
@@ -909,8 +910,8 b' class UserModel(BaseModel):'
909 910 ip_range = ip_range.strip()
910 911 if '-' in ip_range:
911 912 start_ip, end_ip = ip_range.split('-', 1)
912 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
913 start_ip = ipaddress.ip_address(safe_str(start_ip.strip()))
914 end_ip = ipaddress.ip_address(safe_str(end_ip.strip()))
914 915 parsed_ip_range = []
915 916
916 917 for index in range(int(start_ip), int(end_ip) + 1):
@@ -21,7 +21,7 b''
21 21 import logging
22 22 import traceback
23 23
24 from rhodecode.lib.utils2 import safe_str, safe_unicode
24 from rhodecode.lib.utils2 import safe_str
25 25 from rhodecode.lib.exceptions import (
26 26 UserGroupAssignedException, RepoGroupAssignmentError)
27 27 from rhodecode.lib.utils2 import (
@@ -58,7 +58,7 b' class UserGroupModel(BaseModel):'
58 58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
59 59
60 60 user_group_to_perm.user_group = user_group
61 user_group_to_perm.user_id = def_user.user_id
61 user_group_to_perm.user = def_user
62 62 return user_group_to_perm
63 63
64 64 def update_permissions(
@@ -710,7 +710,7 b' class UserGroupModel(BaseModel):'
710 710 query = query.filter(UserGroup.users_group_active == true())
711 711
712 712 if name_contains:
713 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
713 ilike_expression = u'%{}%'.format(safe_str(name_contains))
714 714 query = query.filter(
715 715 UserGroup.users_group_name.ilike(ilike_expression))\
716 716 .order_by(func.length(UserGroup.users_group_name))\